From 5f8887dd0346454c68b1b0d031e210bad0526587 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Wed, 2 Mar 2016 22:30:13 -0800
Subject: [PATCH 001/689] ENH: Add parser for Siemens "ASCCONV" text format

This format is included in most Siemens DICOM files as well as other MR
related files. It contains many important bits of meta data.
---
 nibabel/nicom/ascconv.py                    |  60 ++
 nibabel/nicom/tests/data/ascconv_sample.txt | 919 ++++++++++++++++++++
 nibabel/nicom/tests/test_ascconv.py         |  40 +
 3 files changed, 1019 insertions(+)
 create mode 100644 nibabel/nicom/ascconv.py
 create mode 100644 nibabel/nicom/tests/data/ascconv_sample.txt
 create mode 100644 nibabel/nicom/tests/test_ascconv.py

diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py
new file mode 100644
index 0000000000..b63205b2cb
--- /dev/null
+++ b/nibabel/nicom/ascconv.py
@@ -0,0 +1,60 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+"""
+Parse the "ASCCONV" meta data format found in a variety of Siemens MR files.
+"""
+import ast, re
+from ..externals import OrderedDict
+
+
+ASCCONV_RE = re.compile(
+    r'### ASCCONV BEGIN((?:\s*[^=\s]+=[^=\s]+)*) ###\n(.*?)\n### ASCCONV END ###',
+    flags=re.M | re.S)
+
+
+def parse_ascconv(csa_key, ascconv_str):
+    '''Parse the 'ASCCONV' format from `ascconv_str`.
+
+    Parameters
+    ----------
+    csa_key : str
+        The key in the CSA dict for the element containing `ascconv_str`. Should
+        be 'MrPhoenixProtocol' or 'MrProtocol'.
+    ascconv_str : str
+        The string we are parsing
+
+    Returns
+    -------
+    prot_dict : OrderedDict
+        Meta data pulled from the ASCCONV section.
+    attrs : OrderedDict
+        Any attributes stored in the 'ASCCONV BEGIN' line
+
+    Raises
+    ------
+    SyntaxError
+        A line of the ASCCONV section could not be parsed.
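+
+    Examples
+    --------
+    A minimal, hypothetical input (real ASCCONV sections contain many more
+    assignments)::
+
+        in_str = '\\n'.join(['### ASCCONV BEGIN ###',
+                             'ulVersion = 0x14b44b6',
+                             '### ASCCONV END ###'])
+        prot_dict, attrs = parse_ascconv('MrProtocol', in_str)
+        # prot_dict == OrderedDict([('ulVersion', 0x14b44b6)])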
+ ''' + attrs, content = ASCCONV_RE.match(ascconv_str).groups() + attrs = OrderedDict((tuple(x.split('=')) for x in attrs.split())) + if csa_key == 'MrPhoenixProtocol': + str_delim = '""' + elif csa_key == 'MrProtocol': + str_delim = '"' + else: + raise ValueError('Unknown protocol key: %s' % csa_key) + # Normalize string start / end markers to something Python understands + content = content.replace(str_delim, '"""') + ascconv_lines = content.split('\n') + # Use Python's own parser to parse modified ASCCONV assignments + tree = ast.parse(content) + + result = OrderedDict() + for statement in tree.body: + assert isinstance(statement, ast.Assign) + value = ast.literal_eval(statement.value) + # Get LHS string from corresponding text line + key = ascconv_lines[statement.lineno - 1].split('=')[0].strip() + result[key] = value + + return result, attrs diff --git a/nibabel/nicom/tests/data/ascconv_sample.txt b/nibabel/nicom/tests/data/ascconv_sample.txt new file mode 100644 index 0000000000..1fd78f788f --- /dev/null +++ b/nibabel/nicom/tests/data/ascconv_sample.txt @@ -0,0 +1,919 @@ +### ASCCONV BEGIN ### +ulVersion = 0x14b44b6 +tSequenceFileName = ""%SiemensSeq%\ep2d_diff"" +tProtocolName = ""CBU+AF8-DTI+AF8-64D+AF8-1A"" +tReferenceImage0 = ""1.3.12.2.1107.5.2.32.35119.2010011420070434054586384"" +tReferenceImage1 = ""1.3.12.2.1107.5.2.32.35119.2010011420070721803086388"" +tReferenceImage2 = ""1.3.12.2.1107.5.2.32.35119.201001142007109937386392"" +ucScanRegionPosValid = 0x1 +ucTablePositioningMode = 0x1 +sProtConsistencyInfo.tBaselineString = ""N4_VB17A_LATEST_20090307"" +sProtConsistencyInfo.tSystemType = ""092"" +sProtConsistencyInfo.flNominalB0 = 2.89362 +sProtConsistencyInfo.flGMax = 26 +sProtConsistencyInfo.flRiseTime = 5.88 +sProtConsistencyInfo.lMaximumNofRxReceiverChannels = 18 +sGRADSPEC.sEddyCompensationX.aflAmplitude[0] = 0.00141208 +sGRADSPEC.sEddyCompensationX.aflAmplitude[1] = 0.000569241 +sGRADSPEC.sEddyCompensationX.aflAmplitude[2] = -0.000514958 +sGRADSPEC.sEddyCompensationX.aflAmplitude[3] = 0.000499075 +sGRADSPEC.sEddyCompensationX.aflAmplitude[4] = 0.000821246 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[0] = 1.81531 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[1] = 0.995025 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[2] = 0.0492598 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[3] = 0.0194645 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[4] = 0.000499659 +sGRADSPEC.sEddyCompensationY.aflAmplitude[0] = 0.00112797 +sGRADSPEC.sEddyCompensationY.aflAmplitude[1] = -0.000565372 +sGRADSPEC.sEddyCompensationY.aflAmplitude[2] = -0.00182913 +sGRADSPEC.sEddyCompensationY.aflAmplitude[3] = -2.65859e-005 +sGRADSPEC.sEddyCompensationY.aflAmplitude[4] = 0.000601077 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[0] = 1.09142 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[1] = 0.661632 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[2] = 0.446457 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[3] = 0.0118729 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[4] = 0.00134346 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[0] = 0.00221038 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[1] = 0.00592667 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[2] = 0.000254437 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[3] = -8.35135e-005 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[4] = -4.25678e-005 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[0] = 4.32108 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[1] = 0.923398 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[2] = 0.0379209 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[3] = 
0.0104227 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[4] = 0.00199944 +sGRADSPEC.bEddyCompensationValid = 1 +sGRADSPEC.sB0CompensationX.aflAmplitude[0] = -0.0494045 +sGRADSPEC.sB0CompensationX.aflAmplitude[1] = 0.0730311 +sGRADSPEC.sB0CompensationX.aflAmplitude[2] = -0.00670347 +sGRADSPEC.sB0CompensationX.aflTimeConstant[0] = 0.618983 +sGRADSPEC.sB0CompensationX.aflTimeConstant[1] = 0.341914 +sGRADSPEC.sB0CompensationX.aflTimeConstant[2] = 0.002 +sGRADSPEC.sB0CompensationY.aflAmplitude[0] = 0.136281 +sGRADSPEC.sB0CompensationY.aflAmplitude[1] = 0.0376382 +sGRADSPEC.sB0CompensationY.aflAmplitude[2] = -0.0500779 +sGRADSPEC.sB0CompensationY.aflTimeConstant[0] = 0.71999 +sGRADSPEC.sB0CompensationY.aflTimeConstant[1] = 0.00341892 +sGRADSPEC.sB0CompensationY.aflTimeConstant[2] = 0.002 +sGRADSPEC.sB0CompensationZ.aflAmplitude[0] = 0.0776537 +sGRADSPEC.sB0CompensationZ.aflAmplitude[1] = 0.0168151 +sGRADSPEC.sB0CompensationZ.aflAmplitude[2] = -0.0550622 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[0] = 0.669998 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[1] = 0.0213343 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[2] = 0.00186002 +sGRADSPEC.bB0CompensationValid = 1 +sGRADSPEC.sCrossTermCompensationXY.aflAmplitude[0] = -0.00049613 +sGRADSPEC.sCrossTermCompensationXY.aflTimeConstant[0] = 0.562233 +sGRADSPEC.sCrossTermCompensationXZ.aflAmplitude[0] = -0.000499641 +sGRADSPEC.sCrossTermCompensationXZ.aflTimeConstant[0] = 0.693605 +sGRADSPEC.sCrossTermCompensationYX.aflAmplitude[0] = 5.35458e-005 +sGRADSPEC.sCrossTermCompensationYX.aflTimeConstant[0] = 0.598216 +sGRADSPEC.sCrossTermCompensationYZ.aflAmplitude[0] = 0.0004678 +sGRADSPEC.sCrossTermCompensationYZ.aflTimeConstant[0] = 0.705977 +sGRADSPEC.sCrossTermCompensationZX.aflAmplitude[0] = -0.000529382 +sGRADSPEC.sCrossTermCompensationZX.aflTimeConstant[0] = 0.551175 +sGRADSPEC.sCrossTermCompensationZY.aflAmplitude[0] = 8.74925e-005 +sGRADSPEC.sCrossTermCompensationZY.aflTimeConstant[0] = 0.890761 +sGRADSPEC.bCrossTermCompensationValid = 1 +sGRADSPEC.lOffsetX = -7806 +sGRADSPEC.lOffsetY = -8833 +sGRADSPEC.lOffsetZ = -2097 +sGRADSPEC.bOffsetValid = 1 +sGRADSPEC.lDelayX = 14 +sGRADSPEC.lDelayY = 14 +sGRADSPEC.lDelayZ = 10 +sGRADSPEC.bDelayValid = 1 +sGRADSPEC.flSensitivityX = 7.95149e-005 +sGRADSPEC.flSensitivityY = 7.82833e-005 +sGRADSPEC.flSensitivityZ = 9.09015e-005 +sGRADSPEC.bSensitivityValid = 1 +sGRADSPEC.flGSWDMinRiseTime = 9.88 +sGRADSPEC.alShimCurrent[0] = 867 +sGRADSPEC.alShimCurrent[1] = 80 +sGRADSPEC.alShimCurrent[2] = -61 +sGRADSPEC.alShimCurrent[3] = -4 +sGRADSPEC.alShimCurrent[4] = -16 +sGRADSPEC.bShimCurrentValid = 1 +sGRADSPEC.ucMode = 0x11 +sTXSPEC.asNucleusInfo[0].tNucleus = ""1H"" +sTXSPEC.asNucleusInfo[0].lFrequency = 123251815 +sTXSPEC.asNucleusInfo[0].bFrequencyValid = 1 +sTXSPEC.asNucleusInfo[0].flReferenceAmplitude = 384.855 +sTXSPEC.asNucleusInfo[0].bReferenceAmplitudeValid = 1 +sTXSPEC.asNucleusInfo[0].flAmplitudeCorrection = 1 +sTXSPEC.asNucleusInfo[0].bAmplitudeCorrectionValid = 1 +sTXSPEC.asNucleusInfo[0].bRFPAIndexValid = 1 +sTXSPEC.asNucleusInfo[1].bFrequencyValid = 1 +sTXSPEC.asNucleusInfo[1].bReferenceAmplitudeValid = 1 +sTXSPEC.asNucleusInfo[1].flAmplitudeCorrection = 1 +sTXSPEC.asNucleusInfo[1].bAmplitudeCorrectionValid = 1 +sTXSPEC.asNucleusInfo[1].lRFPAIndex = -1 +sTXSPEC.asNucleusInfo[1].bRFPAIndexValid = 1 +sTXSPEC.aRFPULSE[0].tName = ""ExtExciteRF"" +sTXSPEC.aRFPULSE[0].bAmplitudeValid = 0x1 +sTXSPEC.aRFPULSE[0].flAmplitude = 357.891 +sTXSPEC.aRFPULSE[1].tName = ""CSatCSatNS"" +sTXSPEC.aRFPULSE[1].bAmplitudeValid 
= 0x1 +sTXSPEC.aRFPULSE[1].flAmplitude = 94.871 +sTXSPEC.aRFPULSE[2].tName = ""SLoopFCSatNS"" +sTXSPEC.aRFPULSE[2].bAmplitudeValid = 0x1 +sTXSPEC.aRFPULSE[2].flAmplitude = 94.871 +sTXSPEC.lNoOfTraPulses = 3 +sTXSPEC.lBCExcitationMode = 1 +sTXSPEC.lBCSeqExcitationMode = 4 +sTXSPEC.flKDynMagnitudeMin = 0.5 +sTXSPEC.flKDynMagnitudeMax = 1.5 +sTXSPEC.flKDynMagnitudeClipLow = 1 +sTXSPEC.flKDynMagnitudeClipHigh = 1 +sTXSPEC.flKDynPhaseMax = 0.698132 +sTXSPEC.flKDynPhaseClip = 0.174533 +sTXSPEC.bKDynValid = 1 +sTXSPEC.ucRFPulseType = 0x2 +sTXSPEC.ucExcitMode = 0x1 +sTXSPEC.ucSimultaneousExcitation = 0x1 +sTXSPEC.ucBCExcitationModeValid = 0x1 +sRXSPEC.lGain = 1 +sRXSPEC.bGainValid = 1 +sRXSPEC.alDwellTime[0] = 2800 +sAdjData.uiAdjFreMode = 0x1 +sAdjData.uiAdjShimMode = 0x2 +sAdjData.uiAdjWatSupMode = 0x1 +sAdjData.uiAdjRFMapMode = 0x1 +sAdjData.uiAdjMDSMode = 0x1 +sAdjData.uiAdjTableTolerance = 0x1 +sAdjData.uiAdjProtID = 0x56 +sAdjData.uiAdjFreProtRelated = 0x1 +sAdjData.sAdjVolume.sPosition.dCor = -19.66101724 +sAdjData.sAdjVolume.sPosition.dTra = -8.81356001 +sAdjData.sAdjVolume.sNormal.dCor = 0.005235963828 +sAdjData.sAdjVolume.sNormal.dTra = 0.9999862922 +sAdjData.sAdjVolume.dThickness = 144 +sAdjData.sAdjVolume.dPhaseFOV = 230 +sAdjData.sAdjVolume.dReadoutFOV = 230 +ucEnableNoiseAdjust = 0x1 +alTR[0] = 6600000 +alTI[0] = 2500000 +lContrasts = 1 +alTE[0] = 93000 +acFlowComp[0] = 1 +lCombinedEchoes = 1 +sSliceArray.asSlice[0].sPosition.dCor = -20.03015269 +sSliceArray.asSlice[0].sPosition.dTra = -79.31259361 +sSliceArray.asSlice[0].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[0].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[0].dThickness = 2.5 +sSliceArray.asSlice[0].dPhaseFOV = 230 +sSliceArray.asSlice[0].dReadoutFOV = 230 +sSliceArray.asSlice[1].sPosition.dCor = -20.0144448 +sSliceArray.asSlice[1].sPosition.dTra = -76.31263473 +sSliceArray.asSlice[1].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[1].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[1].dThickness = 2.5 +sSliceArray.asSlice[1].dPhaseFOV = 230 +sSliceArray.asSlice[1].dReadoutFOV = 230 +sSliceArray.asSlice[2].sPosition.dCor = -19.99873691 +sSliceArray.asSlice[2].sPosition.dTra = -73.31267586 +sSliceArray.asSlice[2].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[2].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[2].dThickness = 2.5 +sSliceArray.asSlice[2].dPhaseFOV = 230 +sSliceArray.asSlice[2].dReadoutFOV = 230 +sSliceArray.asSlice[3].sPosition.dCor = -19.98302902 +sSliceArray.asSlice[3].sPosition.dTra = -70.31271698 +sSliceArray.asSlice[3].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[3].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[3].dThickness = 2.5 +sSliceArray.asSlice[3].dPhaseFOV = 230 +sSliceArray.asSlice[3].dReadoutFOV = 230 +sSliceArray.asSlice[4].sPosition.dCor = -19.96732113 +sSliceArray.asSlice[4].sPosition.dTra = -67.3127581 +sSliceArray.asSlice[4].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[4].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[4].dThickness = 2.5 +sSliceArray.asSlice[4].dPhaseFOV = 230 +sSliceArray.asSlice[4].dReadoutFOV = 230 +sSliceArray.asSlice[5].sPosition.dCor = -19.95161324 +sSliceArray.asSlice[5].sPosition.dTra = -64.31279923 +sSliceArray.asSlice[5].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[5].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[5].dThickness = 2.5 +sSliceArray.asSlice[5].dPhaseFOV = 230 +sSliceArray.asSlice[5].dReadoutFOV = 230 +sSliceArray.asSlice[6].sPosition.dCor = -19.93590535 +sSliceArray.asSlice[6].sPosition.dTra = -61.31284035 
+sSliceArray.asSlice[6].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[6].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[6].dThickness = 2.5 +sSliceArray.asSlice[6].dPhaseFOV = 230 +sSliceArray.asSlice[6].dReadoutFOV = 230 +sSliceArray.asSlice[7].sPosition.dCor = -19.92019745 +sSliceArray.asSlice[7].sPosition.dTra = -58.31288147 +sSliceArray.asSlice[7].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[7].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[7].dThickness = 2.5 +sSliceArray.asSlice[7].dPhaseFOV = 230 +sSliceArray.asSlice[7].dReadoutFOV = 230 +sSliceArray.asSlice[8].sPosition.dCor = -19.90448956 +sSliceArray.asSlice[8].sPosition.dTra = -55.3129226 +sSliceArray.asSlice[8].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[8].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[8].dThickness = 2.5 +sSliceArray.asSlice[8].dPhaseFOV = 230 +sSliceArray.asSlice[8].dReadoutFOV = 230 +sSliceArray.asSlice[9].sPosition.dCor = -19.88878167 +sSliceArray.asSlice[9].sPosition.dTra = -52.31296372 +sSliceArray.asSlice[9].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[9].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[9].dThickness = 2.5 +sSliceArray.asSlice[9].dPhaseFOV = 230 +sSliceArray.asSlice[9].dReadoutFOV = 230 +sSliceArray.asSlice[10].sPosition.dCor = -19.87307378 +sSliceArray.asSlice[10].sPosition.dTra = -49.31300484 +sSliceArray.asSlice[10].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[10].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[10].dThickness = 2.5 +sSliceArray.asSlice[10].dPhaseFOV = 230 +sSliceArray.asSlice[10].dReadoutFOV = 230 +sSliceArray.asSlice[11].sPosition.dCor = -19.85736589 +sSliceArray.asSlice[11].sPosition.dTra = -46.31304597 +sSliceArray.asSlice[11].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[11].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[11].dThickness = 2.5 +sSliceArray.asSlice[11].dPhaseFOV = 230 +sSliceArray.asSlice[11].dReadoutFOV = 230 +sSliceArray.asSlice[12].sPosition.dCor = -19.841658 +sSliceArray.asSlice[12].sPosition.dTra = -43.31308709 +sSliceArray.asSlice[12].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[12].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[12].dThickness = 2.5 +sSliceArray.asSlice[12].dPhaseFOV = 230 +sSliceArray.asSlice[12].dReadoutFOV = 230 +sSliceArray.asSlice[13].sPosition.dCor = -19.8259501 +sSliceArray.asSlice[13].sPosition.dTra = -40.31312821 +sSliceArray.asSlice[13].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[13].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[13].dThickness = 2.5 +sSliceArray.asSlice[13].dPhaseFOV = 230 +sSliceArray.asSlice[13].dReadoutFOV = 230 +sSliceArray.asSlice[14].sPosition.dCor = -19.81024221 +sSliceArray.asSlice[14].sPosition.dTra = -37.31316934 +sSliceArray.asSlice[14].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[14].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[14].dThickness = 2.5 +sSliceArray.asSlice[14].dPhaseFOV = 230 +sSliceArray.asSlice[14].dReadoutFOV = 230 +sSliceArray.asSlice[15].sPosition.dCor = -19.79453432 +sSliceArray.asSlice[15].sPosition.dTra = -34.31321046 +sSliceArray.asSlice[15].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[15].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[15].dThickness = 2.5 +sSliceArray.asSlice[15].dPhaseFOV = 230 +sSliceArray.asSlice[15].dReadoutFOV = 230 +sSliceArray.asSlice[16].sPosition.dCor = -19.77882643 +sSliceArray.asSlice[16].sPosition.dTra = -31.31325158 +sSliceArray.asSlice[16].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[16].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[16].dThickness 
= 2.5 +sSliceArray.asSlice[16].dPhaseFOV = 230 +sSliceArray.asSlice[16].dReadoutFOV = 230 +sSliceArray.asSlice[17].sPosition.dCor = -19.76311854 +sSliceArray.asSlice[17].sPosition.dTra = -28.31329271 +sSliceArray.asSlice[17].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[17].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[17].dThickness = 2.5 +sSliceArray.asSlice[17].dPhaseFOV = 230 +sSliceArray.asSlice[17].dReadoutFOV = 230 +sSliceArray.asSlice[18].sPosition.dCor = -19.74741065 +sSliceArray.asSlice[18].sPosition.dTra = -25.31333383 +sSliceArray.asSlice[18].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[18].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[18].dThickness = 2.5 +sSliceArray.asSlice[18].dPhaseFOV = 230 +sSliceArray.asSlice[18].dReadoutFOV = 230 +sSliceArray.asSlice[19].sPosition.dCor = -19.73170276 +sSliceArray.asSlice[19].sPosition.dTra = -22.31337495 +sSliceArray.asSlice[19].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[19].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[19].dThickness = 2.5 +sSliceArray.asSlice[19].dPhaseFOV = 230 +sSliceArray.asSlice[19].dReadoutFOV = 230 +sSliceArray.asSlice[20].sPosition.dCor = -19.71599486 +sSliceArray.asSlice[20].sPosition.dTra = -19.31341608 +sSliceArray.asSlice[20].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[20].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[20].dThickness = 2.5 +sSliceArray.asSlice[20].dPhaseFOV = 230 +sSliceArray.asSlice[20].dReadoutFOV = 230 +sSliceArray.asSlice[21].sPosition.dCor = -19.70028697 +sSliceArray.asSlice[21].sPosition.dTra = -16.3134572 +sSliceArray.asSlice[21].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[21].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[21].dThickness = 2.5 +sSliceArray.asSlice[21].dPhaseFOV = 230 +sSliceArray.asSlice[21].dReadoutFOV = 230 +sSliceArray.asSlice[22].sPosition.dCor = -19.68457908 +sSliceArray.asSlice[22].sPosition.dTra = -13.31349832 +sSliceArray.asSlice[22].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[22].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[22].dThickness = 2.5 +sSliceArray.asSlice[22].dPhaseFOV = 230 +sSliceArray.asSlice[22].dReadoutFOV = 230 +sSliceArray.asSlice[23].sPosition.dCor = -19.66887119 +sSliceArray.asSlice[23].sPosition.dTra = -10.31353945 +sSliceArray.asSlice[23].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[23].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[23].dThickness = 2.5 +sSliceArray.asSlice[23].dPhaseFOV = 230 +sSliceArray.asSlice[23].dReadoutFOV = 230 +sSliceArray.asSlice[24].sPosition.dCor = -19.6531633 +sSliceArray.asSlice[24].sPosition.dTra = -7.313580571 +sSliceArray.asSlice[24].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[24].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[24].dThickness = 2.5 +sSliceArray.asSlice[24].dPhaseFOV = 230 +sSliceArray.asSlice[24].dReadoutFOV = 230 +sSliceArray.asSlice[25].sPosition.dCor = -19.63745541 +sSliceArray.asSlice[25].sPosition.dTra = -4.313621695 +sSliceArray.asSlice[25].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[25].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[25].dThickness = 2.5 +sSliceArray.asSlice[25].dPhaseFOV = 230 +sSliceArray.asSlice[25].dReadoutFOV = 230 +sSliceArray.asSlice[26].sPosition.dCor = -19.62174752 +sSliceArray.asSlice[26].sPosition.dTra = -1.313662818 +sSliceArray.asSlice[26].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[26].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[26].dThickness = 2.5 +sSliceArray.asSlice[26].dPhaseFOV = 230 +sSliceArray.asSlice[26].dReadoutFOV = 230 
+sSliceArray.asSlice[27].sPosition.dCor = -19.60603962 +sSliceArray.asSlice[27].sPosition.dTra = 1.686296059 +sSliceArray.asSlice[27].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[27].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[27].dThickness = 2.5 +sSliceArray.asSlice[27].dPhaseFOV = 230 +sSliceArray.asSlice[27].dReadoutFOV = 230 +sSliceArray.asSlice[28].sPosition.dCor = -19.59033173 +sSliceArray.asSlice[28].sPosition.dTra = 4.686254935 +sSliceArray.asSlice[28].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[28].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[28].dThickness = 2.5 +sSliceArray.asSlice[28].dPhaseFOV = 230 +sSliceArray.asSlice[28].dReadoutFOV = 230 +sSliceArray.asSlice[29].sPosition.dCor = -19.57462384 +sSliceArray.asSlice[29].sPosition.dTra = 7.686213812 +sSliceArray.asSlice[29].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[29].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[29].dThickness = 2.5 +sSliceArray.asSlice[29].dPhaseFOV = 230 +sSliceArray.asSlice[29].dReadoutFOV = 230 +sSliceArray.asSlice[30].sPosition.dCor = -19.55891595 +sSliceArray.asSlice[30].sPosition.dTra = 10.68617269 +sSliceArray.asSlice[30].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[30].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[30].dThickness = 2.5 +sSliceArray.asSlice[30].dPhaseFOV = 230 +sSliceArray.asSlice[30].dReadoutFOV = 230 +sSliceArray.asSlice[31].sPosition.dCor = -19.54320806 +sSliceArray.asSlice[31].sPosition.dTra = 13.68613156 +sSliceArray.asSlice[31].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[31].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[31].dThickness = 2.5 +sSliceArray.asSlice[31].dPhaseFOV = 230 +sSliceArray.asSlice[31].dReadoutFOV = 230 +sSliceArray.asSlice[32].sPosition.dCor = -19.52750017 +sSliceArray.asSlice[32].sPosition.dTra = 16.68609044 +sSliceArray.asSlice[32].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[32].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[32].dThickness = 2.5 +sSliceArray.asSlice[32].dPhaseFOV = 230 +sSliceArray.asSlice[32].dReadoutFOV = 230 +sSliceArray.asSlice[33].sPosition.dCor = -19.51179228 +sSliceArray.asSlice[33].sPosition.dTra = 19.68604932 +sSliceArray.asSlice[33].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[33].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[33].dThickness = 2.5 +sSliceArray.asSlice[33].dPhaseFOV = 230 +sSliceArray.asSlice[33].dReadoutFOV = 230 +sSliceArray.asSlice[34].sPosition.dCor = -19.49608438 +sSliceArray.asSlice[34].sPosition.dTra = 22.68600819 +sSliceArray.asSlice[34].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[34].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[34].dThickness = 2.5 +sSliceArray.asSlice[34].dPhaseFOV = 230 +sSliceArray.asSlice[34].dReadoutFOV = 230 +sSliceArray.asSlice[35].sPosition.dCor = -19.48037649 +sSliceArray.asSlice[35].sPosition.dTra = 25.68596707 +sSliceArray.asSlice[35].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[35].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[35].dThickness = 2.5 +sSliceArray.asSlice[35].dPhaseFOV = 230 +sSliceArray.asSlice[35].dReadoutFOV = 230 +sSliceArray.asSlice[36].sPosition.dCor = -19.4646686 +sSliceArray.asSlice[36].sPosition.dTra = 28.68592595 +sSliceArray.asSlice[36].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[36].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[36].dThickness = 2.5 +sSliceArray.asSlice[36].dPhaseFOV = 230 +sSliceArray.asSlice[36].dReadoutFOV = 230 +sSliceArray.asSlice[37].sPosition.dCor = -19.44896071 +sSliceArray.asSlice[37].sPosition.dTra = 31.68588482 
+sSliceArray.asSlice[37].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[37].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[37].dThickness = 2.5 +sSliceArray.asSlice[37].dPhaseFOV = 230 +sSliceArray.asSlice[37].dReadoutFOV = 230 +sSliceArray.asSlice[38].sPosition.dCor = -19.43325282 +sSliceArray.asSlice[38].sPosition.dTra = 34.6858437 +sSliceArray.asSlice[38].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[38].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[38].dThickness = 2.5 +sSliceArray.asSlice[38].dPhaseFOV = 230 +sSliceArray.asSlice[38].dReadoutFOV = 230 +sSliceArray.asSlice[39].sPosition.dCor = -19.41754493 +sSliceArray.asSlice[39].sPosition.dTra = 37.68580258 +sSliceArray.asSlice[39].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[39].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[39].dThickness = 2.5 +sSliceArray.asSlice[39].dPhaseFOV = 230 +sSliceArray.asSlice[39].dReadoutFOV = 230 +sSliceArray.asSlice[40].sPosition.dCor = -19.40183703 +sSliceArray.asSlice[40].sPosition.dTra = 40.68576145 +sSliceArray.asSlice[40].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[40].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[40].dThickness = 2.5 +sSliceArray.asSlice[40].dPhaseFOV = 230 +sSliceArray.asSlice[40].dReadoutFOV = 230 +sSliceArray.asSlice[41].sPosition.dCor = -19.38612914 +sSliceArray.asSlice[41].sPosition.dTra = 43.68572033 +sSliceArray.asSlice[41].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[41].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[41].dThickness = 2.5 +sSliceArray.asSlice[41].dPhaseFOV = 230 +sSliceArray.asSlice[41].dReadoutFOV = 230 +sSliceArray.asSlice[42].sPosition.dCor = -19.37042125 +sSliceArray.asSlice[42].sPosition.dTra = 46.68567921 +sSliceArray.asSlice[42].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[42].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[42].dThickness = 2.5 +sSliceArray.asSlice[42].dPhaseFOV = 230 +sSliceArray.asSlice[42].dReadoutFOV = 230 +sSliceArray.asSlice[43].sPosition.dCor = -19.35471336 +sSliceArray.asSlice[43].sPosition.dTra = 49.68563808 +sSliceArray.asSlice[43].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[43].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[43].dThickness = 2.5 +sSliceArray.asSlice[43].dPhaseFOV = 230 +sSliceArray.asSlice[43].dReadoutFOV = 230 +sSliceArray.asSlice[44].sPosition.dCor = -19.33900547 +sSliceArray.asSlice[44].sPosition.dTra = 52.68559696 +sSliceArray.asSlice[44].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[44].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[44].dThickness = 2.5 +sSliceArray.asSlice[44].dPhaseFOV = 230 +sSliceArray.asSlice[44].dReadoutFOV = 230 +sSliceArray.asSlice[45].sPosition.dCor = -19.32329758 +sSliceArray.asSlice[45].sPosition.dTra = 55.68555584 +sSliceArray.asSlice[45].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[45].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[45].dThickness = 2.5 +sSliceArray.asSlice[45].dPhaseFOV = 230 +sSliceArray.asSlice[45].dReadoutFOV = 230 +sSliceArray.asSlice[46].sPosition.dCor = -19.30758969 +sSliceArray.asSlice[46].sPosition.dTra = 58.68551471 +sSliceArray.asSlice[46].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[46].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[46].dThickness = 2.5 +sSliceArray.asSlice[46].dPhaseFOV = 230 +sSliceArray.asSlice[46].dReadoutFOV = 230 +sSliceArray.asSlice[47].sPosition.dCor = -19.29188179 +sSliceArray.asSlice[47].sPosition.dTra = 61.68547359 +sSliceArray.asSlice[47].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[47].sNormal.dTra = 0.9999862922 
+sSliceArray.asSlice[47].dThickness = 2.5 +sSliceArray.asSlice[47].dPhaseFOV = 230 +sSliceArray.asSlice[47].dReadoutFOV = 230 +sSliceArray.anAsc[1] = 1 +sSliceArray.anAsc[2] = 2 +sSliceArray.anAsc[3] = 3 +sSliceArray.anAsc[4] = 4 +sSliceArray.anAsc[5] = 5 +sSliceArray.anAsc[6] = 6 +sSliceArray.anAsc[7] = 7 +sSliceArray.anAsc[8] = 8 +sSliceArray.anAsc[9] = 9 +sSliceArray.anAsc[10] = 10 +sSliceArray.anAsc[11] = 11 +sSliceArray.anAsc[12] = 12 +sSliceArray.anAsc[13] = 13 +sSliceArray.anAsc[14] = 14 +sSliceArray.anAsc[15] = 15 +sSliceArray.anAsc[16] = 16 +sSliceArray.anAsc[17] = 17 +sSliceArray.anAsc[18] = 18 +sSliceArray.anAsc[19] = 19 +sSliceArray.anAsc[20] = 20 +sSliceArray.anAsc[21] = 21 +sSliceArray.anAsc[22] = 22 +sSliceArray.anAsc[23] = 23 +sSliceArray.anAsc[24] = 24 +sSliceArray.anAsc[25] = 25 +sSliceArray.anAsc[26] = 26 +sSliceArray.anAsc[27] = 27 +sSliceArray.anAsc[28] = 28 +sSliceArray.anAsc[29] = 29 +sSliceArray.anAsc[30] = 30 +sSliceArray.anAsc[31] = 31 +sSliceArray.anAsc[32] = 32 +sSliceArray.anAsc[33] = 33 +sSliceArray.anAsc[34] = 34 +sSliceArray.anAsc[35] = 35 +sSliceArray.anAsc[36] = 36 +sSliceArray.anAsc[37] = 37 +sSliceArray.anAsc[38] = 38 +sSliceArray.anAsc[39] = 39 +sSliceArray.anAsc[40] = 40 +sSliceArray.anAsc[41] = 41 +sSliceArray.anAsc[42] = 42 +sSliceArray.anAsc[43] = 43 +sSliceArray.anAsc[44] = 44 +sSliceArray.anAsc[45] = 45 +sSliceArray.anAsc[46] = 46 +sSliceArray.anAsc[47] = 47 +sSliceArray.anPos[1] = 1 +sSliceArray.anPos[2] = 2 +sSliceArray.anPos[3] = 3 +sSliceArray.anPos[4] = 4 +sSliceArray.anPos[5] = 5 +sSliceArray.anPos[6] = 6 +sSliceArray.anPos[7] = 7 +sSliceArray.anPos[8] = 8 +sSliceArray.anPos[9] = 9 +sSliceArray.anPos[10] = 10 +sSliceArray.anPos[11] = 11 +sSliceArray.anPos[12] = 12 +sSliceArray.anPos[13] = 13 +sSliceArray.anPos[14] = 14 +sSliceArray.anPos[15] = 15 +sSliceArray.anPos[16] = 16 +sSliceArray.anPos[17] = 17 +sSliceArray.anPos[18] = 18 +sSliceArray.anPos[19] = 19 +sSliceArray.anPos[20] = 20 +sSliceArray.anPos[21] = 21 +sSliceArray.anPos[22] = 22 +sSliceArray.anPos[23] = 23 +sSliceArray.anPos[24] = 24 +sSliceArray.anPos[25] = 25 +sSliceArray.anPos[26] = 26 +sSliceArray.anPos[27] = 27 +sSliceArray.anPos[28] = 28 +sSliceArray.anPos[29] = 29 +sSliceArray.anPos[30] = 30 +sSliceArray.anPos[31] = 31 +sSliceArray.anPos[32] = 32 +sSliceArray.anPos[33] = 33 +sSliceArray.anPos[34] = 34 +sSliceArray.anPos[35] = 35 +sSliceArray.anPos[36] = 36 +sSliceArray.anPos[37] = 37 +sSliceArray.anPos[38] = 38 +sSliceArray.anPos[39] = 39 +sSliceArray.anPos[40] = 40 +sSliceArray.anPos[41] = 41 +sSliceArray.anPos[42] = 42 +sSliceArray.anPos[43] = 43 +sSliceArray.anPos[44] = 44 +sSliceArray.anPos[45] = 45 +sSliceArray.anPos[46] = 46 +sSliceArray.anPos[47] = 47 +sSliceArray.lSize = 48 +sSliceArray.lConc = 1 +sSliceArray.ucMode = 0x2 +sSliceArray.sTSat.dThickness = 50 +sGroupArray.asGroup[0].nSize = 48 +sGroupArray.asGroup[0].dDistFact = 0.2 +sGroupArray.anMember[1] = 1 +sGroupArray.anMember[2] = 2 +sGroupArray.anMember[3] = 3 +sGroupArray.anMember[4] = 4 +sGroupArray.anMember[5] = 5 +sGroupArray.anMember[6] = 6 +sGroupArray.anMember[7] = 7 +sGroupArray.anMember[8] = 8 +sGroupArray.anMember[9] = 9 +sGroupArray.anMember[10] = 10 +sGroupArray.anMember[11] = 11 +sGroupArray.anMember[12] = 12 +sGroupArray.anMember[13] = 13 +sGroupArray.anMember[14] = 14 +sGroupArray.anMember[15] = 15 +sGroupArray.anMember[16] = 16 +sGroupArray.anMember[17] = 17 +sGroupArray.anMember[18] = 18 +sGroupArray.anMember[19] = 19 +sGroupArray.anMember[20] = 20 +sGroupArray.anMember[21] = 21 
+sGroupArray.anMember[22] = 22 +sGroupArray.anMember[23] = 23 +sGroupArray.anMember[24] = 24 +sGroupArray.anMember[25] = 25 +sGroupArray.anMember[26] = 26 +sGroupArray.anMember[27] = 27 +sGroupArray.anMember[28] = 28 +sGroupArray.anMember[29] = 29 +sGroupArray.anMember[30] = 30 +sGroupArray.anMember[31] = 31 +sGroupArray.anMember[32] = 32 +sGroupArray.anMember[33] = 33 +sGroupArray.anMember[34] = 34 +sGroupArray.anMember[35] = 35 +sGroupArray.anMember[36] = 36 +sGroupArray.anMember[37] = 37 +sGroupArray.anMember[38] = 38 +sGroupArray.anMember[39] = 39 +sGroupArray.anMember[40] = 40 +sGroupArray.anMember[41] = 41 +sGroupArray.anMember[42] = 42 +sGroupArray.anMember[43] = 43 +sGroupArray.anMember[44] = 44 +sGroupArray.anMember[45] = 45 +sGroupArray.anMember[46] = 46 +sGroupArray.anMember[47] = 47 +sGroupArray.anMember[48] = -1 +sGroupArray.lSize = 1 +sGroupArray.sPSat.dThickness = 50 +sGroupArray.sPSat.dGap = 10 +sAutoAlign.dAAMatrix[0] = 1 +sAutoAlign.dAAMatrix[5] = 1 +sAutoAlign.dAAMatrix[10] = 1 +sAutoAlign.dAAMatrix[15] = 1 +sNavigatorPara.lBreathHoldMeas = 1 +sNavigatorPara.lRespComp = 4 +sNavigatorPara.alFree[22] = 2 +sNavigatorPara.adFree[13] = 150000 +sBladePara.dBladeCoverage = 100 +sBladePara.ucMotionCorr = 0x2 +sPrepPulses.ucFatSat = 0x1 +sPrepPulses.ucWaterSat = 0x4 +sPrepPulses.ucInversion = 0x4 +sPrepPulses.ucSatRecovery = 0x1 +sPrepPulses.ucT2Prep = 0x1 +sPrepPulses.ucTIScout = 0x1 +sPrepPulses.ucFatSatMode = 0x2 +sPrepPulses.dDarkBloodThickness = 200 +sPrepPulses.dDarkBloodFlipAngle = 200 +sPrepPulses.dT2PrepDuration = 40 +sPrepPulses.dIRPulseThicknessFactor = 0.77 +sKSpace.dPhaseResolution = 1 +sKSpace.dSliceResolution = 1 +sKSpace.dAngioDynCentralRegionA = 20 +sKSpace.dAngioDynSamplingDensityB = 25 +sKSpace.lBaseResolution = 128 +sKSpace.lPhaseEncodingLines = 128 +sKSpace.lPartitions = 64 +sKSpace.lImagesPerSlab = 64 +sKSpace.lRadialViews = 64 +sKSpace.lRadialInterleavesPerImage = 2 +sKSpace.lLinesPerShot = 1 +sKSpace.unReordering = 0x1 +sKSpace.dSeqPhasePartialFourierForSNR = 1 +sKSpace.ucPhasePartialFourier = 0x4 +sKSpace.ucSlicePartialFourier = 0x10 +sKSpace.ucAveragingMode = 0x2 +sKSpace.ucMultiSliceMode = 0x2 +sKSpace.ucDimension = 0x2 +sKSpace.ucTrajectory = 0x1 +sKSpace.ucViewSharing = 0x1 +sKSpace.ucAsymmetricEchoMode = 0x1 +sKSpace.ucPOCS = 0x1 +sFastImaging.lEPIFactor = 128 +sFastImaging.lTurboFactor = 1 +sFastImaging.lSliceTurboFactor = 1 +sFastImaging.lSegments = 1 +sFastImaging.ulEnableRFSpoiling = 0x1 +sFastImaging.ucSegmentationMode = 0x1 +sFastImaging.lShots = 1 +sFastImaging.lEchoTrainDuration = 700 +sPhysioImaging.lSignal1 = 1 +sPhysioImaging.lMethod1 = 1 +sPhysioImaging.lSignal2 = 1 +sPhysioImaging.lMethod2 = 1 +sPhysioImaging.lPhases = 1 +sPhysioImaging.lRetroGatedImages = 16 +sPhysioImaging.sPhysioECG.lTriggerPulses = 1 +sPhysioImaging.sPhysioECG.lTriggerWindow = 5 +sPhysioImaging.sPhysioECG.lArrhythmiaDetection = 1 +sPhysioImaging.sPhysioECG.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioECG.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioECG.lTriggerIntervals = 1 +sPhysioImaging.sPhysioPulse.lTriggerPulses = 1 +sPhysioImaging.sPhysioPulse.lTriggerWindow = 5 +sPhysioImaging.sPhysioPulse.lArrhythmiaDetection = 1 +sPhysioImaging.sPhysioPulse.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioPulse.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioPulse.lTriggerIntervals = 1 +sPhysioImaging.sPhysioExt.lTriggerPulses = 1 +sPhysioImaging.sPhysioExt.lTriggerWindow = 5 +sPhysioImaging.sPhysioExt.lArrhythmiaDetection = 1 
+sPhysioImaging.sPhysioExt.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioExt.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioExt.lTriggerIntervals = 1 +sPhysioImaging.sPhysioResp.lRespGateThreshold = 20 +sPhysioImaging.sPhysioResp.lRespGatePhase = 2 +sPhysioImaging.sPhysioResp.dGatingRatio = 0.3 +sPhysioImaging.sPhysioNative.ucMode = 0x1 +sPhysioImaging.sPhysioNative.ucFlowSenMode = 0x1 +sSpecPara.lPhaseCyclingType = 1 +sSpecPara.lPhaseEncodingType = 1 +sSpecPara.lRFExcitationBandwidth = 1 +sSpecPara.ucRemoveOversampling = 0x1 +sSpecPara.lAutoRefScanNo = 1 +sSpecPara.lDecouplingType = 1 +sSpecPara.lNOEType = 1 +sSpecPara.lExcitationType = 1 +sSpecPara.lSpecAppl = 1 +sSpecPara.lSpectralSuppression = 1 +sDiffusion.lDiffWeightings = 2 +sDiffusion.alBValue[1] = 1000 +sDiffusion.lNoiseLevel = 40 +sDiffusion.lDiffDirections = 64 +sDiffusion.ulMode = 0x100 +sAngio.ucPCFlowMode = 0x2 +sAngio.ucTOFInflow = 0x4 +sAngio.lDynamicReconMode = 1 +sAngio.lTemporalInterpolation = 1 +sRawFilter.lSlope_256 = 25 +sRawFilter.ucOn = 0x1 +sRawFilter.ucMode = 0x1 +sDistortionCorrFilter.ucMode = 0x1 +sPat.lAccelFactPE = 2 +sPat.lAccelFact3D = 1 +sPat.lRefLinesPE = 38 +sPat.ucPATMode = 0x2 +sPat.ucRefScanMode = 0x4 +sPat.ucTPatAverageAllFrames = 0x1 +sMDS.ulMdsModeMask = 0x1 +sMDS.ulMdsVariableResolution = 0x1 +sMDS.lTableSpeedNumerator = 1 +sMDS.lmdsLinesPerSegment = 15 +sMDS.sMdsEndPosSBCS_mm.dTra = 600 +sMDS.ulMdsReconMode = 0x1 +sMDS.dMdsRangeExtension = 600 +ucEnableIntro = 0x1 +ucDisableChangeStoreImages = 0x1 +ucAAMode = 0x1 +ucAARegionMode = 1 +ucAARefMode = 1 +ucReconstructionMode = 0x1 +ucOneSeriesForAllMeas = 0x1 +ucPHAPSMode = 0x1 +ucDixon = 0x1 +ucDixonSaveOriginal = 0x1 +ucWaitForPrepareCompletion = 0x1 +lAverages = 1 +dAveragesDouble = 1 +adFlipAngleDegree[0] = 90 +lScanTimeSec = 449 +lTotalScanTimeSec = 450 +dRefSNR = 33479.60771 +dRefSNR_VOI = 33479.60771 +tdefaultEVAProt = ""%SiemensEvaDefProt%\DTI\DTI.evp"" +asCoilSelectMeas[0].tNucleus = ""1H"" +asCoilSelectMeas[0].iUsedRFactor = 3 +asCoilSelectMeas[0].asList[0].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[0].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[0].sCoilElementID.tElement = ""H3P"" +asCoilSelectMeas[0].asList[0].lElementSelected = 1 +asCoilSelectMeas[0].asList[0].lRxChannelConnected = 1 +asCoilSelectMeas[0].asList[1].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[1].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[1].sCoilElementID.tElement = ""H4P"" +asCoilSelectMeas[0].asList[1].lElementSelected = 1 +asCoilSelectMeas[0].asList[1].lRxChannelConnected = 2 +asCoilSelectMeas[0].asList[2].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[2].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[2].sCoilElementID.tElement = ""H4S"" +asCoilSelectMeas[0].asList[2].lElementSelected = 1 +asCoilSelectMeas[0].asList[2].lRxChannelConnected = 3 +asCoilSelectMeas[0].asList[3].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[3].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[3].sCoilElementID.tElement = ""H4T"" +asCoilSelectMeas[0].asList[3].lElementSelected = 1 +asCoilSelectMeas[0].asList[3].lRxChannelConnected = 4 +asCoilSelectMeas[0].asList[4].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[4].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[4].sCoilElementID.tElement = ""H3S"" +asCoilSelectMeas[0].asList[4].lElementSelected = 1 +asCoilSelectMeas[0].asList[4].lRxChannelConnected = 5 
+asCoilSelectMeas[0].asList[5].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[5].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[5].sCoilElementID.tElement = ""H3T"" +asCoilSelectMeas[0].asList[5].lElementSelected = 1 +asCoilSelectMeas[0].asList[5].lRxChannelConnected = 6 +asCoilSelectMeas[0].asList[6].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[6].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[6].sCoilElementID.tElement = ""H1P"" +asCoilSelectMeas[0].asList[6].lElementSelected = 1 +asCoilSelectMeas[0].asList[6].lRxChannelConnected = 7 +asCoilSelectMeas[0].asList[7].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[7].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[7].sCoilElementID.tElement = ""H2P"" +asCoilSelectMeas[0].asList[7].lElementSelected = 1 +asCoilSelectMeas[0].asList[7].lRxChannelConnected = 8 +asCoilSelectMeas[0].asList[8].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[8].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[8].sCoilElementID.tElement = ""H2S"" +asCoilSelectMeas[0].asList[8].lElementSelected = 1 +asCoilSelectMeas[0].asList[8].lRxChannelConnected = 9 +asCoilSelectMeas[0].asList[9].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[9].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[9].sCoilElementID.tElement = ""H2T"" +asCoilSelectMeas[0].asList[9].lElementSelected = 1 +asCoilSelectMeas[0].asList[9].lRxChannelConnected = 10 +asCoilSelectMeas[0].asList[10].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[10].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[10].sCoilElementID.tElement = ""H1S"" +asCoilSelectMeas[0].asList[10].lElementSelected = 1 +asCoilSelectMeas[0].asList[10].lRxChannelConnected = 11 +asCoilSelectMeas[0].asList[11].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[11].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[11].sCoilElementID.tElement = ""H1T"" +asCoilSelectMeas[0].asList[11].lElementSelected = 1 +asCoilSelectMeas[0].asList[11].lRxChannelConnected = 12 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[0] = 0xff +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[1] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[2] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[3] = 0xad +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[4] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[5] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[6] = 0x5d +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[7] = 0xb1 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[8] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[9] = 0xb2 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[10] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[0] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[1] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[2] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[3] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[4] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[5] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[6] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[7] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[8] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[9] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[10] = 0x2 +asCoilSelectMeas[0].aFFT_SCALE[0].flFactor = 3.77259 +asCoilSelectMeas[0].aFFT_SCALE[0].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[0].lRxChannel = 1 +asCoilSelectMeas[0].aFFT_SCALE[1].flFactor = 3.83164 
+asCoilSelectMeas[0].aFFT_SCALE[1].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[1].lRxChannel = 2 +asCoilSelectMeas[0].aFFT_SCALE[2].flFactor = 3.7338 +asCoilSelectMeas[0].aFFT_SCALE[2].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[2].lRxChannel = 3 +asCoilSelectMeas[0].aFFT_SCALE[3].flFactor = 4.08449 +asCoilSelectMeas[0].aFFT_SCALE[3].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[3].lRxChannel = 4 +asCoilSelectMeas[0].aFFT_SCALE[4].flFactor = 3.82172 +asCoilSelectMeas[0].aFFT_SCALE[4].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[4].lRxChannel = 5 +asCoilSelectMeas[0].aFFT_SCALE[5].flFactor = 3.86816 +asCoilSelectMeas[0].aFFT_SCALE[5].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[5].lRxChannel = 6 +asCoilSelectMeas[0].aFFT_SCALE[6].flFactor = 4.48252 +asCoilSelectMeas[0].aFFT_SCALE[6].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[6].lRxChannel = 7 +asCoilSelectMeas[0].aFFT_SCALE[7].flFactor = 4.39406 +asCoilSelectMeas[0].aFFT_SCALE[7].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[7].lRxChannel = 8 +asCoilSelectMeas[0].aFFT_SCALE[8].flFactor = 4.50498 +asCoilSelectMeas[0].aFFT_SCALE[8].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[8].lRxChannel = 9 +asCoilSelectMeas[0].aFFT_SCALE[9].flFactor = 4.57011 +asCoilSelectMeas[0].aFFT_SCALE[9].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[9].lRxChannel = 10 +asCoilSelectMeas[0].aFFT_SCALE[10].flFactor = 4.6211 +asCoilSelectMeas[0].aFFT_SCALE[10].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[10].lRxChannel = 11 +asCoilSelectMeas[0].aFFT_SCALE[11].flFactor = 4.69845 +asCoilSelectMeas[0].aFFT_SCALE[11].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[11].lRxChannel = 12 +sEFISPEC.bEFIDataValid = 1 +ucCineMode = 0x1 +ucSequenceType = 0x4 +ucCoilCombineMode = 0x2 +ucFlipAngleMode = 0x1 +lTOM = 1 +lProtID = -434 +ucReadOutMode = 0x1 +ucBold3dPace = 0x1 +ucForcePositioningOnNDIS = 0x1 +ucInternalTablePosValid = 0x1 +sParametricMapping.ucParametricMap = 0x1 +sIR.lScanNumber = 1 +sAsl.ulMode = 0x1 +WaitForUserStart = 0x1 +ucAutoAlignInit = 0x1 +### ASCCONV END ### \ No newline at end of file diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py new file mode 100644 index 0000000000..7addb89d51 --- /dev/null +++ b/nibabel/nicom/tests/test_ascconv.py @@ -0,0 +1,40 @@ +""" Testing Siemens "ASCCONV" parser +""" + +from os.path import join as pjoin, dirname + +from .. 
import ascconv
+from ...externals import OrderedDict
+
+from nose.tools import (assert_true, assert_false, assert_equal, assert_raises)
+from numpy.testing import assert_array_equal, assert_array_almost_equal
+
+DATA_PATH = pjoin(dirname(__file__), 'data')
+ASCCONV_INPUT = pjoin(DATA_PATH, 'ascconv_sample.txt')
+
+
+def test_ascconv_parse():
+    with open(ASCCONV_INPUT, 'rt') as fobj:
+        contents = fobj.read()
+    ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', contents)
+    assert_equal(attrs, OrderedDict())
+    assert_equal(len(ascconv_dict), 917)
+    assert_equal(ascconv_dict['tProtocolName'], 'CBU+AF8-DTI+AF8-64D+AF8-1A')
+    assert_equal(ascconv_dict['ucScanRegionPosValid'], 1)
+    assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo.flNominalB0'],
+                              2.89362)
+    assert_equal(ascconv_dict['sProtConsistencyInfo.flGMax'], 26)
+
+
+def test_ascconv_w_attrs():
+    in_str = ("### ASCCONV BEGIN object=MrProtDataImpl@MrProtocolData "
+              "version=41340006 "
+              "converter=%MEASCONST%/ConverterList/Prot_Converter.txt ###\n"
+              "test = \"hello\"\n"
+              "### ASCCONV END ###")
+    ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', in_str)
+    assert_equal(attrs['object'], 'MrProtDataImpl@MrProtocolData')
+    assert_equal(attrs['version'], '41340006')
+    assert_equal(attrs['converter'],
+                 '%MEASCONST%/ConverterList/Prot_Converter.txt')
+    assert_equal(ascconv_dict['test'], 'hello')

From 74bf0788a98a5f96d39915a28c6d7f35b375b89b Mon Sep 17 00:00:00 2001
From: Matthew Brett
Date: Sun, 6 Mar 2016 00:10:24 -0800
Subject: [PATCH 002/689] RF: refactor ascconv to parse lists, sub-dicts

Use assignment ast expressions to parse the ascconv assignment lines,
filling in dicts and subdicts as we read.
---
 nibabel/nicom/ascconv.py            | 179 ++++++++++++++++++++++++----
 nibabel/nicom/tests/test_ascconv.py |  34 +++++-
 2 files changed, 187 insertions(+), 26 deletions(-)

diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py
index b63205b2cb..b4262230d2 100644
--- a/nibabel/nicom/ascconv.py
+++ b/nibabel/nicom/ascconv.py
@@ -3,7 +3,8 @@
 """
 Parse the "ASCCONV" meta data format found in a variety of Siemens MR files.
 """
-import ast, re
+import re
+import ast
 from ..externals import OrderedDict
 
 
@@ -12,16 +13,160 @@
     flags=re.M | re.S)
 
 
+class AscconvParseError(Exception):
+    """ Error parsing ascconv file """
+
+
+class Atom(object):
+    """ Object to hold operation, object type and object identifier
+
+    An atom represents an element in an expression.  For example::
+
+        a.b[0].c
+
+    has four elements.  We call these elements "atoms".
+
+    We represent objects (like ``a``) as dicts for convenience.
+
+    The last element (``.c``) is an ``op = ast.Attribute`` operation where the
+    object type (`obj_type`) of ``c`` is not constrained (we can't tell from
+    the operation what type it is).  The `obj_id` is the name of the object --
+    "c".
+
+    The second to last element (``[0]``) is an ``op = ast.Subscript``, with
+    object type dict (we know from the subsequent operation ``.c`` that this
+    must be an object; we represent objects by dicts).  The `obj_id` is the
+    index 0.
+
+    Parameters
+    ----------
+    op : ast.AST
+        Operation performing the assignment; one of ``ast.Name`` (assignment
+        to name in root namespace), ``ast.Attribute`` (attribute) or
+        ``ast.Subscript`` (list element).
+    obj_type : {list, dict, other}
+        Object type being assigned to.
+    obj_id : str or int
+        Key (``obj_type is dict``) or index (``obj_type is list``)
+    """
+
+    def __init__(self, op, obj_type, obj_id):
+        self.op = op
+        self.obj_type = obj_type
+        self.obj_id = obj_id
+
+
+class NoValue(object):
+    """ Signals no value present """
+
+
+def assign2atoms(assign_ast, default_class=int):
+    """ Parse single assignment ast from ascconv line into atoms
+
+    Parameters
+    ----------
+    assign_ast : assignment statement ast
+        ast derived from single line of ascconv file.
+    default_class : class, optional
+        Class that will create an object where we cannot yet know the object
+        type in the assignment.
+
+    Returns
+    -------
+    atoms : list
+        List of :class:`Atom`.  See docstring for :class:`Atom`.  Defines
+        left to right sequence of assignment in `assign_ast`.
+    """
+    if not len(assign_ast.targets) == 1:
+        raise AscconvParseError('Too many targets in assign')
+    target = assign_ast.targets[0]
+    atoms = []
+    prev_target_type = default_class  # Placeholder for any scalar value
+    while True:
+        if isinstance(target, ast.Name):
+            atoms.append(Atom(target, prev_target_type, target.id))
+            break
+        if isinstance(target, ast.Attribute):
+            atoms.append(Atom(target, prev_target_type, target.attr))
+            target = target.value
+            prev_target_type = OrderedDict
+        elif isinstance(target, ast.Subscript):
+            index = target.slice.value.n
+            atoms.append(Atom(target, prev_target_type, index))
+            target = target.value
+            prev_target_type = list
+        else:
+            raise AscconvParseError(
+                'Unexpected LHS element {0}'.format(target))
+    return reversed(atoms)
+
+
+def _create_obj_in(atom, root):
+    """ Create object defined in `atom` in dict-like given by `root`
+
+    Return defined object.
+    """
+    name = atom.obj_id
+    obj = root.get(name, NoValue)
+    if obj is not NoValue:
+        return obj
+    obj = atom.obj_type()
+    root[name] = obj
+    return obj
+
+
+def _create_subscript_in(atom, root):
+    """ Create object defined in `atom` at index ``atom.obj_id`` in list `root`
+
+    Return defined object.
+    """
+    curr_n = len(root)
+    index = atom.obj_id
+    if curr_n > index:
+        return root[index]
+    obj = atom.obj_type()
+    root += [None] * (index - curr_n) + [obj]
+    return obj
+
+
+def obj_from_atoms(atoms, namespace):
+    """ Return object defined by list `atoms` in dict-like `namespace`
+
+    Parameters
+    ----------
+    atoms : list
+        List of :class:`Atom`
+    namespace : dict-like
+        Namespace in which object will be defined.
+
+    Returns
+    -------
+    obj_root : object
+        Namespace such that we can set a desired value to the object defined in
+        `atoms` with ``obj_root[obj_key] = value``.
+    obj_key : str or int
+        Index into list or key into dictionary for `obj_root`.
+    """
+    root_obj = namespace
+    for el in atoms:
+        prev_root = root_obj
+        if isinstance(el.op, (ast.Attribute, ast.Name)):
+            root_obj = _create_obj_in(el, root_obj)
+        else:
+            root_obj = _create_subscript_in(el, root_obj)
+        if not isinstance(root_obj, el.obj_type):
+            raise AscconvParseError(
+                'Unexpected type for {0} in {1}'.format(el.obj_id, prev_root))
+    return prev_root, el.obj_id
+
+
+def parse_ascconv(ascconv_str, str_delim='"'):
     '''Parse the 'ASCCONV' format from `ascconv_str`.
 
     Parameters
     ----------
-    csa_key : str
-        The key in the CSA dict for the element containing `ascconv_str`. Should
-        be 'MrPhoenixProtocol' or 'MrProtocol'.
     ascconv_str : str
         The string we are parsing
+    str_delim : str, optional
+        String delimiter.  Typically '"' or '""'
 
     Returns
     -------
@@ -32,29 +177,21 @@ def parse_ascconv(csa_key, ascconv_str):
 
     Raises
    ------
-    SyntaxError
+    AscconvParseError
         A line of the ASCCONV section could not be parsed.
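-
-    Examples
-    --------
-    A minimal, hypothetical input (real ASCCONV sections contain many more
-    assignments)::
-
-        in_str = '\\n'.join(['### ASCCONV BEGIN ###',
-                             'ulVersion = 0x14b44b6',
-                             '### ASCCONV END ###'])
-        prot_dict, attrs = parse_ascconv('MrProtocol', in_str)
-        # prot_dict == OrderedDict([('ulVersion', 0x14b44b6)])
+
+    Examples
+    --------
+    A minimal, hypothetical input (real ASCCONV sections contain many more
+    assignments); dotted and indexed names now fill nested dicts and lists::
+
+        in_str = '\\n'.join(['### ASCCONV BEGIN ###',
+                             'sGroupArray.asGroup[0].nSize = 48',
+                             '### ASCCONV END ###'])
+        prot_dict, attrs = parse_ascconv(in_str)
+        # prot_dict['sGroupArray']['asGroup'][0]['nSize'] == 48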
     '''
     attrs, content = ASCCONV_RE.match(ascconv_str).groups()
     attrs = OrderedDict((tuple(x.split('=')) for x in attrs.split()))
-    if csa_key == 'MrPhoenixProtocol':
-        str_delim = '""'
-    elif csa_key == 'MrProtocol':
-        str_delim = '"'
-    else:
-        raise ValueError('Unknown protocol key: %s' % csa_key)
     # Normalize string start / end markers to something Python understands
     content = content.replace(str_delim, '"""')
-    ascconv_lines = content.split('\n')
     # Use Python's own parser to parse modified ASCCONV assignments
     tree = ast.parse(content)
-    result = OrderedDict()
-    for statement in tree.body:
-        assert isinstance(statement, ast.Assign)
-        value = ast.literal_eval(statement.value)
-        # Get LHS string from corresponding text line
-        key = ascconv_lines[statement.lineno - 1].split('=')[0].strip()
-        result[key] = value
+    prot_dict = OrderedDict()
+    for assign in tree.body:
+        atoms = assign2atoms(assign)
+        obj_to_index, key = obj_from_atoms(atoms, prot_dict)
+        value = assign.value.n if isinstance(assign.value, ast.Num) else assign.value.s
+        obj_to_index[key] = value
 
-    return result, attrs
+    return prot_dict, attrs
diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py
index 7addb89d51..af355d693b 100644
--- a/nibabel/nicom/tests/test_ascconv.py
+++ b/nibabel/nicom/tests/test_ascconv.py
@@ -3,6 +3,8 @@
 
 from os.path import join as pjoin, dirname
 
+import numpy as np
+
 from .. import ascconv
 from ...externals import OrderedDict
 
@@ -16,14 +18,36 @@ def test_ascconv_parse():
     with open(ASCCONV_INPUT, 'rt') as fobj:
         contents = fobj.read()
-    ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', contents)
+    ascconv_dict, attrs = ascconv.parse_ascconv(contents, str_delim='""')
     assert_equal(attrs, OrderedDict())
-    assert_equal(len(ascconv_dict), 917)
+    assert_equal(len(ascconv_dict), 72)
     assert_equal(ascconv_dict['tProtocolName'], 'CBU+AF8-DTI+AF8-64D+AF8-1A')
     assert_equal(ascconv_dict['ucScanRegionPosValid'], 1)
-    assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo.flNominalB0'],
+    assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo']['flNominalB0'],
                               2.89362)
-    assert_equal(ascconv_dict['sProtConsistencyInfo.flGMax'], 26)
+    assert_equal(ascconv_dict['sProtConsistencyInfo']['flGMax'], 26)
+    assert_equal(ascconv_dict['sSliceArray'].keys(),
+                 ['asSlice', 'anAsc', 'anPos', 'lSize', 'lConc', 'ucMode',
+                  'sTSat'])
+    slice_arr = ascconv_dict['sSliceArray']
+    as_slice = slice_arr['asSlice']
+    assert_array_equal([e['dPhaseFOV'] for e in as_slice], 230)
+    assert_array_equal([e['dReadoutFOV'] for e in as_slice], 230)
+    assert_array_equal([e['dThickness'] for e in as_slice], 2.5)
+    # Some lists defined starting at 1, so have None as first element
+    assert_equal(slice_arr['anAsc'], [None] + list(range(1, 48)))
+    assert_equal(slice_arr['anPos'], [None] + list(range(1, 48)))
+    # A top level list
+    assert_equal(len(ascconv_dict['asCoilSelectMeas']), 1)
+    as_list = ascconv_dict['asCoilSelectMeas'][0]['asList']
+    # This lower-level list does start indexing at 0
+    assert_equal(len(as_list), 12)
+    for i, el in enumerate(as_list):
+        assert_equal(
+            el.keys(),
+            ['sCoilElementID', 'lElementSelected', 'lRxChannelConnected'])
+        assert_equal(el['lElementSelected'], 1)
+        assert_equal(el['lRxChannelConnected'], i + 1)
 
 
 def test_ascconv_w_attrs():
@@ -32,7 +56,7 @@ def test_ascconv_w_attrs():
               "converter=%MEASCONST%/ConverterList/Prot_Converter.txt ###\n"
               "test = \"hello\"\n"
               "### ASCCONV END ###")
-    ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', in_str)
+    ascconv_dict, attrs = ascconv.parse_ascconv(in_str, '""')
     assert_equal(attrs['object'], 'MrProtDataImpl@MrProtocolData')
     assert_equal(attrs['version'], '41340006')
     assert_equal(attrs['converter'],

From 340765abefd2db1c73e15ddd2a452787e5029683 Mon Sep 17 00:00:00 2001
From: Matthew Brett
Date: Sun, 6 Mar 2016 00:40:02 -0800
Subject: [PATCH 003/689] BF: Python 3 fixes for ascconv parsing

Unary minus seems to have gone from a number in the Python 2.7 ast to a
unary op in Python 3.
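
A quick sketch of the difference (hypothetical snippet, not from the
sources here):

    import ast
    rhs = ast.parse('x = -1.5').body[0].value
    # Python 2.7 (per the observation above): rhs is a plain ast.Num
    # Python 3: rhs is an ast.UnaryOp(op=ast.USub) wrapping ast.Num(n=1.5),
    # which the new _get_value() helper below unwraps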
---
 nibabel/nicom/ascconv.py            | 14 ++++++++++++--
 nibabel/nicom/tests/test_ascconv.py |  6 ++++--
 2 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py
index b4262230d2..bfdf313b74 100644
--- a/nibabel/nicom/ascconv.py
+++ b/nibabel/nicom/ascconv.py
@@ -158,6 +158,17 @@ def obj_from_atoms(atoms, namespace):
     return prev_root, el.obj_id
 
 
+def _get_value(assign):
+    value = assign.value
+    if isinstance(value, ast.Num):
+        return value.n
+    if isinstance(value, ast.Str):
+        return value.s
+    if isinstance(value, ast.UnaryOp) and isinstance(value.op, ast.USub):
+        return -value.operand.n
+    raise AscconvParseError('Unexpected RHS of assignment: {0}'.format(value))
+
+
 def parse_ascconv(ascconv_str, str_delim='"'):
@@ -191,7 +202,6 @@ def parse_ascconv(ascconv_str, str_delim='"'):
     for assign in tree.body:
         atoms = assign2atoms(assign)
         obj_to_index, key = obj_from_atoms(atoms, prot_dict)
-        value = assign.value.n if isinstance(assign.value, ast.Num) else assign.value.s
-        obj_to_index[key] = value
+        obj_to_index[key] = _get_value(assign)
 
     return prot_dict, attrs

diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py
index af355d693b..80ea5dc4f1 100644
--- a/nibabel/nicom/tests/test_ascconv.py
+++ b/nibabel/nicom/tests/test_ascconv.py
@@ -26,7 +26,7 @@ def test_ascconv_parse():
     assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo']['flNominalB0'],
                               2.89362)
     assert_equal(ascconv_dict['sProtConsistencyInfo']['flGMax'], 26)
-    assert_equal(ascconv_dict['sSliceArray'].keys(),
+    assert_equal(list(ascconv_dict['sSliceArray'].keys()),
                  ['asSlice', 'anAsc', 'anPos', 'lSize', 'lConc', 'ucMode',
                   'sTSat'])
     slice_arr = ascconv_dict['sSliceArray']
@@ -44,10 +44,12 @@ def test_ascconv_parse():
     assert_equal(len(as_list), 12)
     for i, el in enumerate(as_list):
         assert_equal(
-            el.keys(),
+            list(el.keys()),
             ['sCoilElementID', 'lElementSelected', 'lRxChannelConnected'])
         assert_equal(el['lElementSelected'], 1)
         assert_equal(el['lRxChannelConnected'], i + 1)
+    # Test negative number
+    assert_array_almost_equal(as_slice[0]['sPosition']['dCor'], -20.03015269)
 
 
 def test_ascconv_w_attrs():

From 180fd5095d02e4a312efaf67c3a402ea3064f6f8 Mon Sep 17 00:00:00 2001
From: Cameron Riddell <31414128+CRiddler@users.noreply.github.com>
Date: Thu, 15 Mar 2018 12:00:13 -0400
Subject: [PATCH 004/689] load save with pathlib.Path objects

For Python >= 3.6, objects representing paths now have the option to have a
dunder `__fspath__` (see PEP 519 for more info) to return the string (or
bytes) representation of the path. Pathlib on 3.6 includes this method, so
we should check for it first. Then, for pathlib objects from Python 3.4-3.5,
`__fspath__` is not available, so we dip into the py3k module to get the
current Python version's (2 or 3) string type (`unicode` in this case).

So overall we're allowing filename to be a path object, but ensuring that it
will be converted to the string/bytes representation of the path before
passing it on to `image_klass.from_filename`.

My only reservation is that `nib.load` is the lowest entry point for this
type of handling, because if someone really wants to call `from_filename` on
an image_klass directly, they won't have this pathlib compatibility;
however, I think that `nib.load` is a very common entry point, which
justifies the placement.
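
With this change, for example (a sketch with a hypothetical path):

    from pathlib import Path
    import nibabel as nib

    img = nib.load(Path('/data') / 'subject1.nii.gz')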
---
 nibabel/loadsave.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py
index 1dc576498b..8eec269ee6 100644
--- a/nibabel/loadsave.py
+++ b/nibabel/loadsave.py
@@ -17,7 +17,7 @@
 from .filebasedimages import ImageFileError
 from .imageclasses import all_image_classes
 from .arrayproxy import is_proxy
-from .py3k import FileNotFoundError
+from .py3k import FileNotFoundError, unicode
 from .deprecated import deprecate_with_version
 
 
@@ -36,6 +36,11 @@ def load(filename, **kwargs):
     img : ``SpatialImage``
         Image of guessed type
     '''
+    if hasattr(filename, '__fspath__'):
+        filename = filename.__fspath__()
+    else:
+        filename = unicode(filename)
+
     if not op.exists(filename):
         raise FileNotFoundError("No such file: '%s'" % filename)
     sniff = None

From 9a9199513d55ef0099ccf0aa25942cb97e1079ac Mon Sep 17 00:00:00 2001
From: Matt Cieslak
Date: Tue, 24 Jul 2018 14:58:52 -0400
Subject: [PATCH 005/689] some dicom headers had an empty string in
 SliceThickness

---
 nibabel/nicom/dicomwrappers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py
index fc3fbe4905..194227c6cf 100755
--- a/nibabel/nicom/dicomwrappers.py
+++ b/nibabel/nicom/dicomwrappers.py
@@ -201,7 +201,7 @@ def voxel_sizes(self):
         zs = self.get('SpacingBetweenSlices')
         if zs is None:
             zs = self.get('SliceThickness')
-        if zs is None:
+        if zs is None or zs == '':
             zs = 1
         # Protect from python decimals in pydicom 0.9.7
         zs = float(zs)

From 87c7dc1a0e0a16cd84d95069c266bd32e35832a5 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Mon, 25 Jun 2018 14:06:11 +0100
Subject: [PATCH 006/689] ENH: Define Cifti2 Axes describing the rows/columns
 of the Cifti2 data
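
Axes for each dimension of a CIFTI2 matrix can now be read from the header
with the new `get_axis` method, and a header can be rebuilt from axes with
`Cifti2Header.from_axes`, or by passing the axes straight to `Cifti2Image`.
A sketch of the intended use (`func.dtseries.nii` is a hypothetical file):

    import numpy as np
    import nibabel as nib

    img = nib.load('func.dtseries.nii')
    series_ax = img.header.get_axis(0)  # e.g. a Series (time) axis
    bm_ax = img.header.get_axis(1)      # e.g. a BrainModel axis
    data = np.asanyarray(img.dataobj)
    new_img = nib.Cifti2Image(data, header=(series_ax, bm_ax))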
a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1268,6 +1268,40 @@ def get_index_map(self, index): ''' return self.matrix.get_index_map(index) + def get_axis(self, index): + ''' + Generates the Cifti2 axis for a given dimension + + Parameters + ---------- + index : int + Dimension for which we want to obtain the mapping. + + Returns + ------- + axis : cifti2_axes.Axis + ''' + from . import cifti2_axes + return cifti2_axes.from_mapping(self.matrix.get_index_map(index)) + + @classmethod + def from_axes(cls, axes): + ''' + Creates a new Cifti2 header based on the Cifti2 axes + + Parameters + ---------- + axes : Tuple[cifti2_axes.Axis] + sequence of Cifti2 axes describing each row/column of the matrix to be stored + + Returns + ------- + header : Cifti2Header + new header describing the rows/columns in a format consistent with Cifti2 + ''' + from . import cifti2_axes + return cifti2_axes.to_header(axes) + class Cifti2Image(DataobjImage): """ Class for single file CIFTI2 format image @@ -1297,8 +1331,10 @@ def __init__(self, Object containing image data. It should be some object that returns an array from ``np.asanyarray``. It should have a ``shape`` attribute or property. - header : Cifti2Header instance + header : Cifti2Header instance or Sequence[cifti2_axes.Axis] Header with data for / from XML part of CIFTI2 format. + Alternatively a sequence of cifti2_axes.Axis objects can be provided + describing each dimension of the array. nifti_header : None or mapping or NIfTI2 header instance, optional Metadata for NIfTI2 component of this format. extra : None or mapping @@ -1306,6 +1342,8 @@ def __init__(self, file_map : mapping, optional Mapping giving file information for this image format. ''' + if not isinstance(header, Cifti2Header) and header: + header = Cifti2Header.from_axes(header) super(Cifti2Image, self).__init__(dataobj, header=header, extra=extra, file_map=file_map) self._nifti_header = Nifti2Header.from_header(nifti_header) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py new file mode 100644 index 0000000000..3125b58404 --- /dev/null +++ b/nibabel/cifti2/cifti2_axes.py @@ -0,0 +1,1224 @@ +import numpy as np +from nibabel.cifti2 import cifti2 +from six import string_types +from operator import xor + + +def from_mapping(mim): + """ + Parses the MatrixIndicesMap to find the appropriate CIFTI axis describing the rows or columns + + Parameters + ---------- + mim : cifti2.Cifti2MatrixIndicesMap + + Returns + ------- + subtype of Axis + """ + return_type = {'CIFTI_INDEX_TYPE_SCALARS': Scalar, + 'CIFTI_INDEX_TYPE_LABELS': Label, + 'CIFTI_INDEX_TYPE_SERIES': Series, + 'CIFTI_INDEX_TYPE_BRAIN_MODELS': BrainModel, + 'CIFTI_INDEX_TYPE_PARCELS': Parcels} + return return_type[mim.indices_map_to_data_type].from_mapping(mim) + + +def to_header(axes): + """ + Converts the axes describing the rows/columns of a CIFTI vector/matrix to a Cifti2Header + + Parameters + ---------- + axes : iterable[Axis] + one or more axes describing each dimension in turn + + Returns + ------- + cifti2.Cifti2Header + """ + axes = list(axes) + mims_all = [] + matrix = cifti2.Cifti2Matrix() + for dim, ax in enumerate(axes): + if ax in axes[:dim]: + dim_prev = axes.index(ax) + mims_all[dim_prev].applies_to_matrix_dimension.append(dim) + mims_all.append(mims_all[dim_prev]) + else: + mim = ax.to_mapping(dim) + mims_all.append(mim) + matrix.append(mim) + return cifti2.Cifti2Header(matrix) + + +class Axis(object): + """ + Generic object describing the rows or columns of a CIFTI 
vector/matrix + + Attributes + ---------- + arr : np.ndarray + (N, ) typed array with the actual information on each row/column + """ + _use_dtype = None + arr = None + + def __init__(self, arr): + self.arr = np.asarray(arr, dtype=self._use_dtype) + + def get_element(self, index): + """ + Extracts a single element from the axis + + Parameters + ---------- + index : int + Indexes the row/column of interest + + Returns + ------- + Description of the row/column + """ + return self.arr[index] + + def __getitem__(self, item): + if isinstance(item, int): + return self.get_element(item) + if isinstance(item, string_types): + raise IndexError("Can not index an Axis with a string (except for Parcels)") + return type(self)(self.arr[item]) + + @property + def size(self, ): + return self.arr.size + + def __len__(self): + return self.size + + def __eq__(self, other): + return (type(self) == type(other) and + len(self) == len(other) and + (self.arr == other.arr).all()) + + def __add__(self, other): + """ + Concatenates two Axes of the same type + + Parameters + ---------- + other : Axis + axis to be appended to the current one + + Returns + ------- + Axis of the same subtype as self and other + """ + if type(self) == type(other): + return type(self)(np.append(self.arr, other.arr)) + return NotImplemented + + +class BrainModel(Axis): + """ + Each row/column in the CIFTI vector/matrix represents a single vertex or voxel + + This Axis describes which vertex/voxel is represented by each row/column. + + Attributes + ---------- + voxel : np.ndarray + (N, 3) array with the voxel indices + vertex : np.ndarray + (N, ) array with the vertex indices + name : np.ndarray + (N, ) array with the brain structure objects + """ + _use_dtype = np.dtype([('vertex', 'i4'), ('voxel', ('i4', 3)), + ('name', 'U%i' % max(len(name) for name in cifti2.CIFTI_BRAIN_STRUCTURES))]) + _affine = None + _volume_shape = None + + def __init__(self, arr, affine=None, volume_shape=None, nvertices=None): + """ + Creates a new BrainModel axis defining the vertices and voxels represented by each row/column + + Parameters + ---------- + arr : np.ndarray + (N, ) structured array with for every element a tuple with 3 elements: + - vertex index (-1 for voxels) + - 3 voxel indices (-1 for vertices) + - string (name of brain structure) + affine : np.ndarray + (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) + volume_shape : Tuple[int, int, int] + shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) + nvertices : dict[String -> int] + maps names of surface elements to integers + """ + super(BrainModel, self).__init__(arr) + self.name = self.name # correct names to CIFTI brain structures + if nvertices is None: + self.nvertices = {} + else: + self.nvertices = dict(nvertices) + for name in list(self.nvertices.keys()): + if name not in self.name: + del self.nvertices[name] + if self.is_surface.all(): + self.affine = None + self.volume_shape = None + else: + self.affine = affine + self.volume_shape = volume_shape + + @classmethod + def from_mapping(cls, mim): + """ + Creates a new BrainModel axis based on a CIFTI dataset + + Parameters + ---------- + mim : cifti2.Cifti2MatrixIndicesMap + + Returns + ------- + BrainModel + """ + nbm = np.sum([bm.index_count for bm in mim.brain_models]) + arr = np.zeros(nbm, dtype=cls._use_dtype) + arr['voxel'] = -1 + arr['vertex'] = -1 + nvertices = {} + affine, shape = None, None + for bm in mim.brain_models: + 
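+            # Each Cifti2BrainModel element covers a contiguous block of rows
+            # (index_offset / index_count); surface models carry vertex indices,
+            # while volume models carry ijk voxel indices and must all share a
+            # single affine and volume shape (checked below).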
index_end = bm.index_offset + bm.index_count + is_surface = bm.model_type == 'CIFTI_MODEL_TYPE_SURFACE' + arr['name'][bm.index_offset: index_end] = bm.brain_structure + if is_surface: + arr['vertex'][bm.index_offset: index_end] = bm.vertex_indices + nvertices[bm.brain_structure] = bm.surface_number_of_vertices + else: + arr['voxel'][bm.index_offset: index_end, :] = bm.voxel_indices_ijk + if affine is None: + shape = mim.volume.volume_dimensions + affine = mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix + else: + if shape != mim.volume.volume_dimensions: + raise ValueError("All volume masks should be defined in the same volume") + if (affine != mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix).any(): + raise ValueError("All volume masks should have the same affine") + return cls(arr, affine, shape, nvertices) + + @classmethod + def from_mask(cls, mask, name='other', affine=None): + """ + Creates a new BrainModel axis describing the provided mask + + Parameters + ---------- + mask : np.ndarray + all non-zero voxels will be included in the BrainModel axis + should be (Nx, Ny, Nz) array for volume mask or (Nvertex, ) array for surface mask + name : str + Name of the brain structure (e.g. 'CortexRight', 'thalamus_left' or 'brain_stem') + affine : np.ndarray + (4, 4) array with the voxel to mm transformation (defaults to identity matrix) + Argument will be ignored for surface masks + + Returns + ------- + BrainModel which covers the provided mask + """ + if affine is None: + affine = np.eye(4) + if np.asarray(affine).shape != (4, 4): + raise ValueError("Affine transformation should be a 4x4 array or None, not %r" % affine) + if mask.ndim == 1: + return cls.from_surface(np.where(mask != 0)[0], mask.size, name=name) + elif mask.ndim == 3: + voxels = np.array(np.where(mask != 0)).T + arr = np.zeros(len(voxels), dtype=cls._use_dtype) + arr['vertex'] = -1 + arr['voxel'] = voxels + arr['name'] = cls.to_cifti_brain_structure_name(name) + return cls(arr, affine=affine, volume_shape=mask.shape) + else: + raise ValueError("Mask should be either 1-dimensional (for surfaces) or " + "3-dimensional (for volumes), not %i-dimensional" % mask.ndim) + + @classmethod + def from_surface(cls, vertices, nvertex, name='Cortex'): + """ + Creates a new BrainModel axis describing the vertices on a surface + + Parameters + ---------- + vertices : np.ndarray + indices of the vertices on the surface + nvertex : int + total number of vertices on the surface + name : str + Name of the brain structure (e.g. 
'CortexLeft' or 'CortexRight') + + Returns + ------- + BrainModel which covers (part of) the surface + """ + arr = np.zeros(len(vertices), dtype=cls._use_dtype) + arr['voxel'] = -1 + arr['vertex'] = vertices + arr['name'] = cls.to_cifti_brain_structure_name(name) + return cls(arr, nvertices={arr['name'][0]: nvertex}) + + def get_element(self, index): + """ + Describes a single element from the axis + + Parameters + ---------- + index : int + Indexes the row/column of interest + + Returns + ------- + tuple with 3 elements + - boolean, which is True if it is a surface element + - vertex index if it is a surface element, otherwise array with 3 voxel indices + - structure.BrainStructure object describing the brain structure the element was taken from + """ + elem = self.arr[index] + is_surface = elem['name'] in self.nvertices.keys() + name = 'vertex' if is_surface else 'voxel' + return is_surface, elem[name], elem['name'] + + def to_mapping(self, dim): + """ + Converts the brain model axis to a MatrixIndicesMap for storage in CIFTI format + + Parameters + ---------- + dim : int + which dimension of the CIFTI vector/matrix is described by this dataset (zero-based) + + Returns + ------- + cifti2.Cifti2MatrixIndicesMap + """ + mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_BRAIN_MODELS') + for name, to_slice, bm in self.iter_structures(): + is_surface = name in self.nvertices.keys() + if is_surface: + voxels = None + vertices = cifti2.Cifti2VertexIndices(bm.vertex) + nvertex = self.nvertices[name] + else: + voxels = cifti2.Cifti2VoxelIndicesIJK(bm.voxel) + vertices = None + nvertex = None + if mim.volume is None: + affine = cifti2.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(-3, matrix=self.affine) + mim.volume = cifti2.Cifti2Volume(self.volume_shape, affine) + cifti_bm = cifti2.Cifti2BrainModel(to_slice.start, len(bm), + 'CIFTI_MODEL_TYPE_SURFACE' if is_surface else 'CIFTI_MODEL_TYPE_VOXELS', + name, nvertex, voxels, vertices) + mim.append(cifti_bm) + return mim + + def iter_structures(self, ): + """ + Iterates over all brain structures in the order that they appear along the axis + + Yields + ------ + tuple with + - CIFTI brain structure name + - slice to select the data associated with the brain structure from the tensor + - brain model covering that specific brain structure + """ + idx_start = 0 + start_name = self.name[idx_start] + for idx_current, name in enumerate(self.name): + if start_name != name: + yield start_name, slice(idx_start, idx_current), self[idx_start: idx_current] + idx_start = idx_current + start_name = self.name[idx_start] + yield start_name, slice(idx_start, None), self[idx_start:] + + @property + def affine(self, ): + return self._affine + + @affine.setter + def affine(self, value): + if value is not None: + value = np.asarray(value) + if value.shape != (4, 4): + raise ValueError('Affine transformation should be a 4x4 array') + self._affine = value + + @property + def volume_shape(self, ): + return self._volume_shape + + @volume_shape.setter + def volume_shape(self, value): + if value is not None: + value = tuple(value) + if len(value) != 3: + raise ValueError("Volume shape should be a tuple of length 3") + self._volume_shape = value + + @property + def is_surface(self, ): + """True for any element on the surface + """ + return np.vectorize(lambda name: name in self.nvertices.keys())(self.name) + + @property + def voxel(self, ): + """The voxel represented by each row or column + """ + return self.arr['voxel'] + + @voxel.setter + def voxel(self, values): + 
self.arr['voxel'] = values
+
+    @property
+    def vertex(self, ):
+        """The vertex represented by each row or column
+        """
+        return self.arr['vertex']
+
+    @vertex.setter
+    def vertex(self, values):
+        self.arr['vertex'] = values
+
+    @property
+    def name(self, ):
+        """The brain structure to which the voxel/vertices belong
+        """
+        return self.arr['name']
+
+    @name.setter
+    def name(self, values):
+        self.arr['name'] = [self.to_cifti_brain_structure_name(name) for name in values]
+
+    @staticmethod
+    def to_cifti_brain_structure_name(name):
+        """
+        Attempts to convert the name of an anatomical region into a format recognized by CIFTI
+
+        This function returns:
+        * the name if it is in the CIFTI format already
+        * if the name is a tuple the first element is assumed to be the structure name while
+          the second is assumed to be the hemisphere (left, right or both). The latter will default
+          to both.
+        * names like left_cortex, cortex_left, LeftCortex, or CortexLeft will be converted to
+          CIFTI_STRUCTURE_CORTEX_LEFT
+
+        see ``nibabel.cifti2.tests.test_name`` for examples of which conversions are possible
+
+        Parameters
+        ----------
+        name: (str, tuple)
+            input name of an anatomical region
+
+        Returns
+        -------
+        CIFTI2 compatible name
+
+        Raises
+        ------
+        ValueError: raised if the input name does not match a known anatomical structure in CIFTI
+        """
+        if name in cifti2.CIFTI_BRAIN_STRUCTURES:
+            return name
+        if not isinstance(name, string_types):
+            if len(name) == 1:
+                structure = name[0]
+                orientation = 'both'
+            else:
+                structure, orientation = name
+                if structure.lower() in ('left', 'right', 'both'):
+                    orientation, structure = name
+        else:
+            orient_names = ('left', 'right', 'both')
+            for poss_orient in orient_names:
+                idx = len(poss_orient)
+                if poss_orient == name.lower()[:idx]:
+                    orientation = poss_orient
+                    if name[idx] in '_ ':
+                        structure = name[idx + 1:]
+                    else:
+                        structure = name[idx:]
+                    break
+                if poss_orient == name.lower()[-idx:]:
+                    orientation = poss_orient
+                    if name[-idx - 1] in '_ ':
+                        structure = name[:-idx - 1]
+                    else:
+                        structure = name[:-idx]
+                    break
+            else:
+                orientation = 'both'
+                structure = name
+        if orientation.lower() == 'both':
+            proposed_name = 'CIFTI_STRUCTURE_%s' % structure.upper()
+        else:
+            proposed_name = 'CIFTI_STRUCTURE_%s_%s' % (structure.upper(), orientation.upper())
+        if proposed_name not in cifti2.CIFTI_BRAIN_STRUCTURES:
+            raise ValueError('%s was interpreted as %s, which is not a valid CIFTI brain structure' %
+                             (name, proposed_name))
+        return proposed_name
+
+    def __getitem__(self, item):
+        if isinstance(item, int):
+            return self.get_element(item)
+        if isinstance(item, string_types):
+            raise IndexError("Can not index an Axis with a string (except for Parcels)")
+        return type(self)(self.arr[item], self.affine, self.volume_shape, self.nvertices)
+
+    def __eq__(self, other):
+        if type(self) != type(other) or len(self) != len(other):
+            return False
+        if xor(self.affine is None, other.affine is None):
+            return False
+        return (((self.affine is None and other.affine is None) or
+                 (abs(self.affine - other.affine).max() < 1e-8 and
+                  self.volume_shape == other.volume_shape)) and
+                (self.nvertices == other.nvertices) and
+                (self.arr == other.arr).all())
+
+    def __add__(self, other):
+        """
+        Concatenates two BrainModels
+
+        Parameters
+        ----------
+        other : BrainModel
+            brain model to be appended to the current one
+
+        Returns
+        -------
+        BrainModel
+        """
+        if type(self) == type(other):
+            if self.affine is None:
+                affine, shape = other.affine, other.volume_shape
+            else:
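+                # Keep our own geometry; `other` must describe the same brain
+                # volume (verified just below)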
+                affine, shape = self.affine, self.volume_shape
+            if other.affine is not None and ((other.affine != affine).any() or
+                                             other.volume_shape != shape):
+                raise ValueError("Trying to concatenate two BrainModels defined in a different brain volume")
+            nvertices = dict(self.nvertices)
+            for name, value in other.nvertices.items():
+                if name in nvertices.keys() and nvertices[name] != value:
+                    raise ValueError("Trying to concatenate two BrainModels with inconsistent number of vertices for %s"
+                                     % name)
+                nvertices[name] = value
+            return type(self)(np.append(self.arr, other.arr), affine, shape, nvertices)
+        return NotImplemented
+
+
+class Parcels(Axis):
+    """
+    Each row/column in the CIFTI vector/matrix represents a parcel of voxels/vertices
+
+    This Axis describes which parcel is represented by each row/column.
+
+    Attributes
+    ----------
+    name : np.ndarray
+        (N, ) string array with the parcel names
+    parcel : np.ndarray
+        (N, ) array with the actual parcels (each of which is a BrainModel object)
+
+    Individual parcels can also be accessed based on their name, using
+    >>> parcel = parcel_axis[name]
+    """
+    _use_dtype = np.dtype([('name', 'U60'), ('voxels', 'object'), ('vertices', 'object')])
+    _affine = None
+    _volume_shape = None
+
+    def __init__(self, arr, affine=None, volume_shape=None, nvertices=None):
+        """
+        Creates a new Parcels axis defining the named parcels of voxels/vertices represented by each row/column
+
+        Parameters
+        ----------
+        arr : np.ndarray
+            (N, ) structured array with, for every element, a tuple of 3 elements:
+            - string (name of parcel)
+            - (M, 3) int array with the M voxel indices in the parcel
+            - Dict[String -> (K, ) int array] mapping surface brain structure names to vertex indices
+        affine : np.ndarray
+            (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface)
+        volume_shape : Tuple[int, int, int]
+            shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface)
+        nvertices : dict[String -> int]
+            maps names of surface elements to integers
+        """
+        super(Parcels, self).__init__(arr)
+        self.affine = affine
+        self.volume_shape = volume_shape
+        if nvertices is None:
+            self.nvertices = {}
+        else:
+            self.nvertices = dict(nvertices)
+
+    @classmethod
+    def from_brain_models(cls, named_brain_models):
+        """
+        Creates a Parcel axis from a list of BrainModel axes with names
+
+        Parameters
+        ----------
+        named_brain_models : List[Tuple[String, BrainModel]]
+            list of (parcel name, brain model representation) pairs defining each parcel
+
+        Returns
+        -------
+        Parcels
+        """
+        affine = None
+        volume_shape = None
+        arr = np.zeros(len(named_brain_models), dtype=cls._use_dtype)
+        nvertices = {}
+        for idx_parcel, (parcel_name, bm) in enumerate(named_brain_models):
+            voxels = bm.voxel[~bm.is_surface]
+            if voxels.shape[0] != 0:
+                if affine is None:
+                    affine = bm.affine
+                    volume_shape = bm.volume_shape
+                else:
+                    if (affine != bm.affine).any() or (volume_shape != bm.volume_shape):
+                        raise ValueError(
+                            "Can not combine brain models defined in different volumes into a single Parcel axis")
+            vertices = {}
+            for name, _, bm_part in bm.iter_structures():
+                if name in bm.nvertices.keys():
+                    if name in nvertices.keys() and nvertices[name] != bm.nvertices[name]:
+                        raise ValueError("Got conflicting numbers of vertices for surface structure %s" % name)
+                    nvertices[name] = bm.nvertices[name]
+                    vertices[name] = bm_part.vertex
+            arr[idx_parcel] = (parcel_name, voxels, vertices)
+        return Parcels(arr, affine, volume_shape, nvertices)
+
+    @classmethod
+    def from_mapping(cls, mim):
+        """
+        Creates a new Parcels axis based on a CIFTI dataset
+
+        Parameters
+        ----------
+        mim : cifti2.Cifti2MatrixIndicesMap
+
+        Returns
+        -------
+        Parcels
+        """
+        nparcels = len(list(mim.parcels))
+        arr = np.zeros(nparcels, dtype=cls._use_dtype)
+        volume_shape = None if mim.volume is None else mim.volume.volume_dimensions
+        affine = None if mim.volume is None else mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix
+        nvertices = {}
+        for surface in mim.surfaces:
+            nvertices[surface.brain_structure] = surface.surface_number_of_vertices
+        for idx_parcel, parcel in enumerate(mim.parcels):
+            nvoxels = 0 if parcel.voxel_indices_ijk is None else len(parcel.voxel_indices_ijk)
+            voxels = np.zeros((nvoxels, 3), dtype='i4')
+            if nvoxels != 0:
+                voxels[()] = parcel.voxel_indices_ijk
+            vertices = {}
+            for vertex in parcel.vertices:
+                name = vertex.brain_structure
+                vertices[vertex.brain_structure] = np.array(vertex)
+                if name not in nvertices.keys():
+                    raise ValueError("Number of vertices for surface structure %s not defined" % name)
+            arr[idx_parcel]['voxels'] = voxels
+            arr[idx_parcel]['vertices'] = vertices
+            arr[idx_parcel]['name'] = parcel.name
+        return cls(arr, affine, volume_shape, nvertices)
+
+    def to_mapping(self, dim):
+        """
+        Converts the parcels to a MatrixIndicesMap for storage in CIFTI format
+
+        Parameters
+        ----------
+        dim : int
+            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+
+        Returns
+        -------
+        cifti2.Cifti2MatrixIndicesMap
+        """
+        mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_PARCELS')
+        if self.affine is not None:
+            affine = cifti2.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(-3, matrix=self.affine)
+            mim.volume = cifti2.Cifti2Volume(self.volume_shape, affine)
+        for name, nvertex in self.nvertices.items():
+            mim.append(cifti2.Cifti2Surface(name, nvertex))
+        for name, voxels, vertices in self.arr:
+            cifti_voxels = cifti2.Cifti2VoxelIndicesIJK(voxels)
+            element = cifti2.Cifti2Parcel(name, cifti_voxels)
+            for name, idx_vertices in vertices.items():
+                element.vertices.append(cifti2.Cifti2Vertices(name, idx_vertices))
+            mim.append(element)
+        return mim
+
+    def get_element(self, index):
+        """
+        Describes a single element from the axis
+
+        Parameters
+        ----------
+        index : int
+            Indexes the row/column of interest
+
+        Returns
+        -------
+        tuple with 3 elements
+        - unicode name of the parcel
+        - (M, 3) int array with voxel indices
+        - Dict[String -> (K, ) int array] with vertex indices for a specific surface brain structure
+        """
+        return self.name[index], self.voxels[index], self.vertices[index]
+
+    @property
+    def affine(self, ):
+        return self._affine
+
+    @affine.setter
+    def affine(self, value):
+        if value is not None:
+            value = np.asarray(value)
+            if value.shape != (4, 4):
+                raise ValueError('Affine transformation should be a 4x4 array')
+        self._affine = value
+
+    @property
+    def volume_shape(self, ):
+        return self._volume_shape
+
+    @volume_shape.setter
+    def volume_shape(self, value):
+        if value is not None:
+            value = tuple(value)
+            if len(value) != 3:
+                raise ValueError("Volume shape should be a tuple of length 3")
+        self._volume_shape = value
+
+    @property
+    def name(self, ):
+        return self.arr['name']
+
+    @name.setter
+    def name(self, values):
+        self.arr['name'] = values
+
+    @property
+    def voxels(self, ):
+        return self.arr['voxels']
+
+    @voxels.setter
+    def voxels(self, values):
+        self.arr['voxels'] = values
+
+    @property
+    def vertices(self, ):
+        return self.arr['vertices']
+
+    @vertices.setter
+    def vertices(self, values):
+        self.arr['vertices'] = values
+
+    def __getitem__(self, item):
+        if isinstance(item, string_types):
+            idx = np.where(self.name == item)[0]
+            if len(idx) == 0:
+                raise IndexError("Parcel %s not found" % item)
+            if len(idx) > 1:
+                raise IndexError("Multiple parcels with name %s found" % item)
+            return self.voxels[idx[0]], self.vertices[idx[0]]
+        if isinstance(item, int):
+            return self.get_element(item)
+        return type(self)(self.arr[item], self.affine, self.volume_shape, self.nvertices)
+
+    def __eq__(self, other):
+        if (type(self) != type(other) or len(self) != len(other) or
+                (self.name != other.name).any() or self.nvertices != other.nvertices or
+                any((vox1 != vox2).any() for vox1, vox2 in zip(self.voxels, other.voxels))):
+            return False
+        if self.affine is not None:
+            if (other.affine is None or
+                    abs(self.affine - other.affine).max() > 1e-8 or
+                    self.volume_shape != other.volume_shape):
+                return False
+        elif other.affine is not None:
+            return False
+        for vert1, vert2 in zip(self.vertices, other.vertices):
+            if len(vert1) != len(vert2):
+                return False
+            for name in vert1.keys():
+                if name not in vert2 or (vert1[name] != vert2[name]).any():
+                    return False
+        return True
+
+    def __add__(self, other):
+        """
+        Concatenates two Parcels
+
+        Parameters
+        ----------
+        other : Parcels
+            parcel axis to be appended to the current one
+
+        Returns
+        -------
+        Parcels
+        """
+        if type(self) == type(other):
+            if self.affine is None:
+                affine, shape = other.affine, other.volume_shape
+            else:
+                affine, shape = self.affine, self.volume_shape
+            if other.affine is not None and ((other.affine != affine).any() or
+                                             other.volume_shape != shape):
+                raise ValueError("Trying to concatenate two Parcels defined in a different brain volume")
+            nvertices = dict(self.nvertices)
+            for name, value in other.nvertices.items():
+                if name in nvertices.keys() and nvertices[name] != value:
+                    raise ValueError("Trying to concatenate two Parcels with inconsistent number of vertices for %s"
+                                     % name)
+                nvertices[name] = value
+            return type(self)(np.append(self.arr, other.arr), affine, shape, nvertices)
+        return NotImplemented
+
+
+class Scalar(Axis):
+    """
+    Along this axis of the CIFTI vector/matrix each row/column has been given a unique name and optionally metadata
+
+    Attributes
+    ----------
+    name : np.ndarray
+        (N, ) string array with the row/column names
+    meta : np.ndarray
+        (N, ) array with a dictionary of metadata for each row/column
+    """
+    _use_dtype = np.dtype([('name', 'U60'), ('meta', 'object')])
+
+    def __init__(self, arr):
+        """
+        Creates a new Scalar axis from (name, meta-data) pairs
+
+        Parameters
+        ----------
+        arr : Iterable[Tuple[str, dict[str -> str]]
+            iterates over all rows/columns assigning a name and a dictionary of metadata to each
+        """
+        super(Scalar, self).__init__(arr)
+
+    @classmethod
+    def from_mapping(cls, mim):
+        """
+        Creates a new Scalar axis based on a CIFTI dataset
+
+        Parameters
+        ----------
+        mim : cifti2.Cifti2MatrixIndicesMap
+
+        Returns
+        -------
+        Scalar
+        """
+        res = np.zeros(len(list(mim.named_maps)), dtype=cls._use_dtype)
+        res['name'] = [nm.map_name for nm in mim.named_maps]
+        res['meta'] = [{} if nm.metadata is None else dict(nm.metadata) for nm in mim.named_maps]
+        return cls(res)
+
+    @classmethod
+    def from_names(cls, names):
+        """
+        Creates a new Scalar axis with the given row/column names
+
+        Parameters
+        ----------
+        names : List[str]
+            gives a unique name to every row/column in the matrix
+
+        Returns
+        -------
+        Scalar
+        """
+        res = np.zeros(len(names), dtype=cls._use_dtype)
+        res['name'] = names
+        res['meta'] = [{} for _ in names]
+        return cls(res)
+
+    def to_mapping(self, dim):
+        """
+        Converts the scalars to a MatrixIndicesMap for storage in CIFTI format
+
+        Parameters
+        ----------
+        dim : int
+            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+
+        Returns
+        -------
+        cifti2.Cifti2MatrixIndicesMap
+        """
+        mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_SCALARS')
+        for elem in self.arr:
+            meta = None if len(elem['meta']) == 0 else elem['meta']
+            named_map = cifti2.Cifti2NamedMap(elem['name'], cifti2.Cifti2MetaData(meta))
+            mim.append(named_map)
+        return mim
+
+    def get_element(self, index):
+        """
+        Describes a single element from the axis
+
+        Parameters
+        ----------
+        index : int
+            Indexes the row/column of interest
+
+        Returns
+        -------
+        tuple with 2 elements
+        - unicode name of the scalar
+        - dictionary with the element metadata
+        """
+        return self.arr['name'][index], self.arr['meta'][index]
+
+    def to_label(self, labels):
+        """
+        Creates a new Label axis based on the Scalar axis
+
+        Parameters
+        ----------
+        labels : list[dict]
+            mapping from integers to (name, (R, G, B, A)), where `name` is a string and R, G, B, and A are floats
+            between 0 and 1 giving the colour and alpha (transparency)
+
+        Returns
+        -------
+        Label
+        """
+        res = np.zeros(self.size, dtype=Label._use_dtype)
+        res['name'] = self.arr['name']
+        res['meta'] = self.arr['meta']
+        res['get_label'] = labels
+        return Label(res)
+
+    @property
+    def name(self, ):
+        return self.arr['name']
+
+    @name.setter
+    def name(self, values):
+        self.arr['name'] = values
+
+    @property
+    def meta(self, ):
+        return self.arr['meta']
+
+    @meta.setter
+    def meta(self, values):
+        self.arr['meta'] = values
+
+
+class Label(Axis):
+    """
+    Along this axis of the CIFTI vector/matrix each row/column has been given a unique name,
+    label table, and optionally metadata
+
+    Attributes
+    ----------
+    name : np.ndarray
+        (N, ) string array with the row/column names
+    meta : np.ndarray
+        (N, ) array with a dictionary of metadata for each row/column
+    label : np.ndarray
+        (N, ) array with dictionaries mapping integer values to label names and RGBA colors
+    """
+    _use_dtype = np.dtype([('name', 'U60'), ('get_label', 'object'), ('meta', 'object')])
+
+    def __init__(self, arr):
+        """
+        Creates a new Label axis from (name, label table, meta-data) triples
+
+        Parameters
+        ----------
+        arr : Iterable[Tuple[str, dict[int -> (str, (float, float, float, float)), dict(str->str)]]
+            iterates over all rows/columns assigning a name, a dictionary mapping integers to label names
+            and rgba colours, and a dictionary of metadata to each
+        """
+        super(Label, self).__init__(arr)
+
+    @classmethod
+    def from_mapping(cls, mim):
+        """
+        Creates a new Label axis based on a CIFTI dataset
+
+        Parameters
+        ----------
+        mim : cifti2.Cifti2MatrixIndicesMap
+
+        Returns
+        -------
+        Label
+        """
+        tables = [{key: (value.label, value.rgba) for key, value in nm.label_table.items()}
+                  for nm in mim.named_maps]
+        return Scalar.from_mapping(mim).to_label(tables)
+
+    def to_mapping(self, dim):
+        """
+        Converts the labels to a MatrixIndicesMap for storage in CIFTI format
+
+        Parameters
+        ----------
+        dim : int
+            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+
+        Returns
+        -------
+        cifti2.Cifti2MatrixIndicesMap
+        """
+        mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_LABELS')
+        for elem in self.arr:
+            label_table = cifti2.Cifti2LabelTable()
+            for key, value in elem['get_label'].items():
+                label_table[key] = (value[0],) + tuple(value[1])
+            meta = None if len(elem['meta']) == 0 else elem['meta']
+            named_map = cifti2.Cifti2NamedMap(elem['name'], cifti2.Cifti2MetaData(meta),
+                                              label_table)
+            mim.append(named_map)
+        return mim
+
+    def get_element(self, index):
+        """
+        Describes a single element from the axis
+
+        Parameters
+        ----------
+        index : int
+            Indexes the row/column of interest
+
+        Returns
+        -------
+        tuple with 3 elements
+        - unicode name of the element
+        - dictionary with the label table
+        - dictionary with the element metadata
+        """
+        return self.arr['name'][index], self.arr['get_label'][index], self.arr['meta'][index]
+
+    @property
+    def name(self, ):
+        return self.arr['name']
+
+    @name.setter
+    def name(self, values):
+        self.arr['name'] = values
+
+    @property
+    def meta(self, ):
+        return self.arr['meta']
+
+    @meta.setter
+    def meta(self, values):
+        self.arr['meta'] = values
+
+    @property
+    def label(self, ):
+        return self.arr['get_label']
+
+    @label.setter
+    def label(self, values):
+        self.arr['get_label'] = values
+
+
+class Series(Axis):
+    """
+    Along this axis of the CIFTI vector/matrix the rows/columns increase monotonically in time
+
+    This Axis describes the time point of each row/column.
+
+    Attributes
+    ----------
+    start : float
+        starting time point
+    step : float
+        sampling time (TR)
+    size : int
+        number of time points
+    """
+    size = None
+    _unit = None
+
+    def __init__(self, start, step, size, unit="SECOND"):
+        """
+        Creates a new Series axis
+
+        Parameters
+        ----------
+        start : float
+            Time of the first datapoint
+        step : float
+            Step size between data points
+        size : int
+            Number of data points
+        unit : str
+            Unit of the step size (one of 'second', 'hertz', 'meter', or 'radian')
+        """
+        self.unit = unit
+        self.start = start
+        self.step = step
+        self.size = size
+
+    @property
+    def unit(self, ):
+        return self._unit
+
+    @unit.setter
+    def unit(self, value):
+        if value.upper() not in ("SECOND", "HERTZ", "METER", "RADIAN"):
+            raise ValueError("Series unit should be one of ('second', 'hertz', 'meter', or 'radian')")
+        self._unit = value.upper()
+
+    @property
+    def arr(self, ):
+        return np.arange(self.size) * self.step + self.start
+
+    @classmethod
+    def from_mapping(cls, mim):
+        """
+        Creates a new Series axis based on a CIFTI dataset
+
+        Parameters
+        ----------
+        mim : cifti2.Cifti2MatrixIndicesMap
+
+        Returns
+        -------
+        Series
+        """
+        start = mim.series_start * 10 ** mim.series_exponent
+        step = mim.series_step * 10 ** mim.series_exponent
+        return cls(start, step, mim.number_of_series_points, mim.series_unit)
+
+    def to_mapping(self, dim):
+        """
+        Converts the series to a MatrixIndicesMap for storage in CIFTI format
+
+        Parameters
+        ----------
+        dim : int
+            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+
+        Returns
+        -------
+        cifti2.Cifti2MatrixIndicesMap
+        """
+        mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_SERIES')
+        mim.series_exponent = 0
+        mim.series_start = self.start
+        mim.series_step = self.step
+        mim.number_of_series_points = self.size
+        mim.series_unit = self.unit
+        return mim
+
+    def extend(self, other_axis):
+        """
+        Concatenates two series
+
+        Note: this will ignore the start point of the other axis
+
+        Parameters
+        ----------
+        other_axis : Series
+            other axis
+
+        Returns
+        -------
+        Series
+        """
+        if other_axis.step != self.step:
+            raise ValueError('Can only concatenate series with the same step size')
+        if other_axis.unit != self.unit:
+            raise ValueError('Can only concatenate series with the same unit')
+        return Series(self.start, self.step, self.size + other_axis.size, self.unit)
+
+    def __getitem__(self, item):
+        if isinstance(item, slice):
+            step = 1 if item.step is None else item.step
+            idx_start = ((self.size - 1 if step < 0 else 0)
+                         if item.start is None else
+                         (item.start if item.start >= 0 else self.size + item.start))
+            idx_end = ((-1 if step < 0 else self.size)
+                       if item.stop is None else
+                       (item.stop if item.stop >= 0 else self.size + item.stop))
+            if idx_start > self.size:
+                idx_start = self.size - 1
+            if idx_end > self.size:
+                idx_end = self.size
+            nelements = (idx_end - idx_start) // step
+            if nelements < 0:
+                nelements = 0
+            return Series(idx_start * self.step + self.start, self.step * step, nelements)
+        elif isinstance(item, int):
+            return self.get_element(item)
+        raise IndexError('Series can only be indexed with integers or slices without breaking the regular structure')
+
+    def get_element(self, index):
+        """
+        Gives the time point of a specific row/column
+
+        Parameters
+        ----------
+        index : int
+            Indexes the row/column of interest
+
+        Returns
+        -------
+        float
+        """
+        if index < 0:
+            index = self.size + index
+        if index >= self.size:
+            raise IndexError("index %i is out of range for series with size %i" % (index, self.size))
+        return self.start + self.step * index
+
+    def __add__(self, other):
+        """
+        Concatenates two Series
+
+        Parameters
+        ----------
+        other : Series
+            Time series to append at the end of the current time series.
+            Note that the starting time of the other time series is ignored.
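+            For example (editor's illustration, matching ``test_series`` below):
+            ``Series(3, 10, 4) + Series(8, 10, 3)`` gives a Series with start 3,
+            step 10 and 7 time points.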
+
+        Returns
+        -------
+        Series
+            New time series with the concatenation of the two
+
+        Raises
+        ------
+        ValueError
+            raised if the repetition time of the two time series is different
+        """
+        if isinstance(other, Series):
+            return self.extend(other)
+        return NotImplemented
diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py
new file mode 100644
index 0000000000..64e94f2663
--- /dev/null
+++ b/nibabel/cifti2/tests/test_axes.py
@@ -0,0 +1,245 @@
+import numpy as np
+from nose.tools import assert_raises
+from .test_io import check_rewrite
+import nibabel.cifti2.cifti2_axes as axes
+
+
+rand_affine = np.random.randn(4, 4)
+vol_shape = (5, 10, 3)
+
+
+def get_brain_models():
+    """
+    Generates a set of practice BrainModel axes
+
+    Yields
+    ------
+    BrainModel axis
+    """
+    mask = np.zeros(vol_shape)
+    mask[0, 1, 2] = 1
+    mask[0, 4, 2] = True
+    mask[0, 4, 0] = True
+    yield axes.BrainModel.from_mask(mask, 'ThalamusRight', rand_affine)
+    mask[0, 0, 0] = True
+    yield axes.BrainModel.from_mask(mask, affine=rand_affine)
+
+    yield axes.BrainModel.from_surface([0, 5, 10], 15, 'CortexLeft')
+    yield axes.BrainModel.from_surface([0, 5, 10, 13], 15)
+
+    surface_mask = np.zeros(15, dtype='bool')
+    surface_mask[[2, 9, 14]] = True
+    yield axes.BrainModel.from_mask(surface_mask, name='CortexRight')
+
+
+def get_parcels():
+    """
+    Generates a practice Parcel axis out of all practice brain models
+
+    Returns
+    -------
+    Parcel axis
+    """
+    bml = list(get_brain_models())
+    return axes.Parcels.from_brain_models([('mixed', bml[0] + bml[2]), ('volume', bml[1]), ('surface', bml[3])])
+
+
+def get_scalar():
+    """
+    Generates a practice Scalar axis with names ('one', 'two', 'three')
+
+    Returns
+    -------
+    Scalar axis
+    """
+    return axes.Scalar.from_names(['one', 'two', 'three'])
+
+
+def get_label():
+    """
+    Generates a practice Label axis with names ('one', 'two', 'three') and two labels
+
+    Returns
+    -------
+    Label axis
+    """
+    return axes.Scalar.from_names(['one', 'two', 'three']).to_label({0: ('something', (0.2, 0.4, 0.1, 0.5)),
+                                                                     1: ('even better', (0.3, 0.8, 0.43, 0.9))})
+
+def get_series():
+    """
+    Generates a set of 4 practice Series axes with different starting times/lengths/time steps and units
+
+    Yields
+    ------
+    Series axis
+    """
+    yield axes.Series(3, 10, 4)
+    yield axes.Series(8, 10, 3)
+    yield axes.Series(3, 2, 4)
+    yield axes.Series(5, 10, 5, "HERTZ")
+
+
+def get_axes():
+    """
+    Iterates through all of the practice axes defined in the functions above
+
+    Yields
+    ------
+    Cifti2 axis
+    """
+    yield get_parcels()
+    yield get_scalar()
+    yield get_label()
+    for elem in get_brain_models():
+        yield elem
+    for elem in get_series():
+        yield elem
+
+
+def test_brain_models():
+    """
+    Tests the introspection and creation of CIFTI2 BrainModel axes
+    """
+    bml = list(get_brain_models())
+    assert len(bml[0]) == 3
+    assert (bml[0].vertex == -1).all()
+    assert (bml[0].voxel == [[0, 1, 2], [0, 4, 0], [0, 4, 2]]).all()
+    assert bml[0][1][0] == False
+    assert (bml[0][1][1] == [0, 4, 0]).all()
+    assert bml[0][1][2] == axes.BrainModel.to_cifti_brain_structure_name('thalamus_right')
+    assert len(bml[1]) == 4
+    assert (bml[1].vertex == -1).all()
+    assert (bml[1].voxel == [[0, 0, 0], [0, 1, 2], [0, 4, 0], [0, 4, 2]]).all()
+    assert len(bml[2]) == 3
+    assert (bml[2].voxel == -1).all()
+    assert (bml[2].vertex == [0, 5, 10]).all()
+    assert bml[2][1] == (True, 5, 'CIFTI_STRUCTURE_CORTEX_LEFT')
+    assert len(bml[3]) == 4
+    assert (bml[3].voxel == -1).all()
+    assert (bml[3].vertex == [0, 5,
10, 13]).all() + assert bml[4][1] == (True, 9, 'CIFTI_STRUCTURE_CORTEX_RIGHT') + assert len(bml[4]) == 3 + assert (bml[4].voxel == -1).all() + assert (bml[4].vertex == [2, 9, 14]).all() + + for bm, label in zip(bml, ['ThalamusRight', 'Other', 'cortex_left', 'cortex']): + structures = list(bm.iter_structures()) + assert len(structures) == 1 + name = structures[0][0] + assert name == axes.BrainModel.to_cifti_brain_structure_name(label) + if 'CORTEX' in name: + assert bm.nvertices[name] == 15 + else: + assert name not in bm.nvertices + assert (bm.affine == rand_affine).all() + assert bm.volume_shape == vol_shape + + bmt = bml[0] + bml[1] + bml[2] + bml[3] + assert len(bmt) == 14 + structures = list(bmt.iter_structures()) + assert len(structures) == 4 + for bm, (name, _, bm_split) in zip(bml, structures): + assert bm == bm_split + assert (bm_split.name == name).all() + assert bm == bmt[bmt.name == bm.name[0]] + assert bm == bmt[np.where(bmt.name == bm.name[0])] + + bmt = bmt + bml[3] + assert len(bmt) == 18 + structures = list(bmt.iter_structures()) + assert len(structures) == 4 + assert len(structures[-1][2]) == 8 + + +def test_parcels(): + """ + Test the introspection and creation of CIFTI2 Parcel axes + """ + prc = get_parcels() + assert isinstance(prc, axes.Parcels) + assert prc['mixed'][0].shape == (3, 3) + assert len(prc['mixed'][1]) == 1 + assert prc['mixed'][1]['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (3, ) + + assert prc['volume'][0].shape == (4, 3) + assert len(prc['volume'][1]) == 0 + + assert prc['surface'][0].shape == (0, 3) + assert len(prc['surface'][1]) == 1 + assert prc['surface'][1]['CIFTI_STRUCTURE_CORTEX'].shape == (4, ) + + prc2 = prc + prc + assert len(prc2) == 6 + assert (prc2.affine == prc.affine).all() + assert (prc2.nvertices == prc.nvertices) + assert (prc2.volume_shape == prc.volume_shape) + assert prc2[:3] == prc + assert prc2[3:] == prc + + assert prc2[3:]['mixed'][0].shape == (3, 3) + assert len(prc2[3:]['mixed'][1]) == 1 + assert prc2[3:]['mixed'][1]['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (3, ) + + +def test_scalar(): + """ + Test the introspection and creation of CIFTI2 Scalar axes + """ + sc = get_scalar() + assert len(sc) == 3 + assert isinstance(sc, axes.Scalar) + assert (sc.name == ['one', 'two', 'three']).all() + assert sc[1] == ('two', {}) + sc2 = sc + sc + assert len(sc2) == 6 + assert (sc2.name == ['one', 'two', 'three', 'one', 'two', 'three']).all() + assert sc2[:3] == sc + assert sc2[3:] == sc + + +def test_series(): + """ + Test the introspection and creation of CIFTI2 Series axes + """ + sr = list(get_series()) + assert sr[0].unit == 'SECOND' + assert sr[1].unit == 'SECOND' + assert sr[2].unit == 'SECOND' + assert sr[3].unit == 'HERTZ' + + assert (sr[0].arr == np.arange(4) * 10 + 3).all() + assert (sr[1].arr == np.arange(3) * 10 + 8).all() + assert (sr[2].arr == np.arange(4) * 2 + 3).all() + assert ((sr[0] + sr[1]).arr == np.arange(7) * 10 + 3).all() + assert ((sr[1] + sr[0]).arr == np.arange(7) * 10 + 8).all() + assert ((sr[1] + sr[0] + sr[0]).arr == np.arange(11) * 10 + 8).all() + assert sr[1][2] == 28 + assert sr[1][-2] == sr[1].arr[-2] + assert_raises(ValueError, lambda: sr[0] + sr[2]) + assert_raises(ValueError, lambda: sr[2] + sr[1]) + assert_raises(ValueError, lambda: sr[0] + sr[3]) + assert_raises(ValueError, lambda: sr[3] + sr[1]) + assert_raises(ValueError, lambda: sr[3] + sr[2]) + + # test slicing + assert (sr[0][1:3].arr == sr[0].arr[1:3]).all() + assert (sr[0][1:].arr == sr[0].arr[1:]).all() + assert (sr[0][:-2].arr == 
sr[0].arr[:-2]).all() + assert (sr[0][1:-1].arr == sr[0].arr[1:-1]).all() + assert (sr[0][1:-1:2].arr == sr[0].arr[1:-1:2]).all() + assert (sr[0][::2].arr == sr[0].arr[::2]).all() + assert (sr[0][:10:2].arr == sr[0].arr[::2]).all() + assert (sr[0][10::-1].arr == sr[0].arr[::-1]).all() + assert (sr[0][3:1:-1].arr == sr[0].arr[3:1:-1]).all() + assert (sr[0][1:3:-1].arr == sr[0].arr[1:3:-1]).all() + + +def test_writing(): + """ + Tests the writing and reading back in of custom created CIFTI2 axes + """ + for ax1 in get_axes(): + for ax2 in get_axes(): + arr = np.random.randn(len(ax1), len(ax2)) + check_rewrite(arr, (ax1, ax2)) diff --git a/nibabel/cifti2/tests/test_io.py b/nibabel/cifti2/tests/test_io.py new file mode 100644 index 0000000000..22f4c27253 --- /dev/null +++ b/nibabel/cifti2/tests/test_io.py @@ -0,0 +1,176 @@ +from nibabel.cifti2 import cifti2_axes, cifti2 +from nibabel.tests.nibabel_data import get_nibabel_data, needs_nibabel_data +import nibabel as nib +import os +import numpy as np +import tempfile + +test_directory = os.path.join(get_nibabel_data(), 'nitest-cifti2') + +hcp_labels = ['CortexLeft', 'CortexRight', 'AccumbensLeft', 'AccumbensRight', 'AmygdalaLeft', 'AmygdalaRight', + 'brain_stem', 'CaudateLeft', 'CaudateRight', 'CerebellumLeft', 'CerebellumRight', + 'Diencephalon_ventral_left', 'Diencephalon_ventral_right', 'HippocampusLeft', 'HippocampusRight', + 'PallidumLeft', 'PallidumRight', 'PutamenLeft', 'PutamenRight', 'ThalamusLeft', 'ThalamusRight'] + +hcp_n_elements = [29696, 29716, 135, 140, 315, 332, 3472, 728, 755, 8709, 9144, 706, + 712, 764, 795, 297, 260, 1060, 1010, 1288, 1248] + +hcp_affine = np.array([[ -2., 0., 0., 90.], + [ 0., 2., 0., -126.], + [ 0., 0., 2., -72.], + [ 0., 0., 0., 1.]]) + + +def check_hcp_grayordinates(brain_model): + """Checks that a BrainModel matches the expected 32k HCP grayordinates + """ + assert isinstance(brain_model, cifti2_axes.BrainModel) + structures = list(brain_model.iter_structures()) + assert len(structures) == len(hcp_labels) + idx_start = 0 + for idx, (name, _, bm), label, nel in zip(range(len(structures)), structures, hcp_labels, hcp_n_elements): + if idx < 2: + assert name in bm.nvertices.keys() + assert (bm.voxel == -1).all() + assert (bm.vertex != -1).any() + assert bm.nvertices[name] == 32492 + else: + assert name not in bm.nvertices.keys() + assert (bm.voxel != -1).any() + assert (bm.vertex == -1).all() + assert (bm.affine == hcp_affine).all() + assert bm.volume_shape == (91, 109, 91) + assert name == cifti2_axes.BrainModel.to_cifti_brain_structure_name(label) + assert len(bm) == nel + assert (bm.arr == brain_model.arr[idx_start:idx_start + nel]).all() + idx_start += nel + assert idx_start == len(brain_model) + + assert (brain_model.arr[:5]['vertex'] == np.arange(5)).all() + assert structures[0][2].vertex[-1] == 32491 + assert structures[1][2].vertex[0] == 0 + assert structures[1][2].vertex[-1] == 32491 + assert (structures[-1][2].arr[-1] == brain_model.arr[-1]).all() + assert (brain_model.arr[-1]['voxel'] == [38, 55, 46]).all() + assert (brain_model.arr[70000]['voxel'] == [56, 22, 19]).all() + + +def check_Conte69(brain_model): + """Checks that the BrainModel matches the expected Conte69 surface coordinates + """ + assert isinstance(brain_model, cifti2_axes.BrainModel) + structures = list(brain_model.iter_structures()) + assert len(structures) == 2 + assert structures[0][0] == 'CIFTI_STRUCTURE_CORTEX_LEFT' + assert structures[0][2].is_surface.all() + assert structures[1][0] == 'CIFTI_STRUCTURE_CORTEX_RIGHT' + assert 
structures[1][2].is_surface.all()
+    assert (brain_model.voxel == -1).all()
+
+    assert (brain_model.arr[:5]['vertex'] == np.arange(5)).all()
+    assert structures[0][2].vertex[-1] == 32491
+    assert structures[1][2].vertex[0] == 0
+    assert structures[1][2].vertex[-1] == 32491
+
+
+def check_rewrite(arr, axes, extension='.nii'):
+    """
+    Checks whether writing the Cifti2 array to disk and reading it back in gives the same object
+
+    Parameters
+    ----------
+    arr : array
+        N-dimensional array of data
+    axes : Sequence[cifti2_axes.Axis]
+        sequence of length N with the meaning of the rows/columns along each dimension
+    extension : str
+        custom extension to use
+    """
+    (fd, name) = tempfile.mkstemp(extension)
+    cifti2.Cifti2Image(arr, header=axes).to_filename(name)
+    img = nib.load(name)
+    arr2 = img.get_data()
+    assert (arr == arr2).all()
+    for idx in range(len(img.shape)):
+        assert (axes[idx] == img.header.get_axis(idx))
+    return img
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_ones():
+    img = nib.load(os.path.join(test_directory, 'ones.dscalar.nii'))
+    arr = img.get_data()
+    axes = [img.header.get_axis(dim) for dim in range(2)]
+    assert (arr == 1).all()
+    assert isinstance(axes[0], cifti2_axes.Scalar)
+    assert len(axes[0]) == 1
+    assert axes[0].name[0] == 'ones'
+    assert axes[0].meta[0] == {}
+    check_hcp_grayordinates(axes[1])
+    img = check_rewrite(arr, axes)
+    check_hcp_grayordinates(img.header.get_axis(1))
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_conte69_dscalar():
+    img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dscalar.nii'))
+    arr = img.get_data()
+    axes = [img.header.get_axis(dim) for dim in range(2)]
+    assert isinstance(axes[0], cifti2_axes.Scalar)
+    assert len(axes[0]) == 2
+    assert axes[0].name[0] == 'MyelinMap_BC_decurv'
+    assert axes[0].name[1] == 'corrThickness'
+    assert axes[0].meta[0] == {'PaletteColorMapping': '\n MODE_AUTO_SCALE_PERCENTAGE\n 98.000000 2.000000 2.000000 98.000000\n -100.000000 0.000000 0.000000 100.000000\n ROY-BIG-BL\n true\n true\n false\n true\n THRESHOLD_TEST_SHOW_OUTSIDE\n THRESHOLD_TYPE_OFF\n false\n -1.000000 1.000000\n -1.000000 1.000000\n -1.000000 1.000000\n \n PALETTE_THRESHOLD_RANGE_MODE_MAP\n'}
+    check_Conte69(axes[1])
+    check_rewrite(arr, axes)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_conte69_dtseries():
+    img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dtseries.nii'))
+    arr = img.get_data()
+    axes = [img.header.get_axis(dim) for dim in range(2)]
+    assert isinstance(axes[0], cifti2_axes.Series)
+    assert len(axes[0]) == 2
+    assert axes[0].start == 0
+    assert axes[0].step == 1
+    assert axes[0].size == arr.shape[0]
+    assert (axes[0].arr == [0, 1]).all()
+    check_Conte69(axes[1])
+    check_rewrite(arr, axes)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_conte69_dlabel():
+    img = nib.load(os.path.join(test_directory, 'Conte69.parcellations_VGD11b.32k_fs_LR.dlabel.nii'))
+    arr = img.get_data()
+    axes = [img.header.get_axis(dim) for dim in range(2)]
+    assert isinstance(axes[0], cifti2_axes.Label)
+    assert len(axes[0]) == 3
+    assert (axes[0].name == ['Composite Parcellation-lh (FRB08_OFP03_retinotopic)',
+                             'Brodmann lh (from colin.R via pals_R-to-fs_LR)', 'MEDIAL WALL lh (fs_LR)']).all()
+    assert axes[0].label[1][70] == ('19_B05', (1.0, 0.867, 0.467, 1.0))
+    assert (axes[0].meta == [{}] * 3).all()
+    check_Conte69(axes[1])
+    check_rewrite(arr, axes)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_conte69_ptseries():
+    img =
nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.ptseries.nii')) + arr = img.get_data() + axes = [img.header.get_axis(dim) for dim in range(2)] + assert isinstance(axes[0], cifti2_axes.Series) + assert len(axes[0]) == 2 + assert axes[0].start == 0 + assert axes[0].step == 1 + assert axes[0].size == arr.shape[0] + assert (axes[0].arr == [0, 1]).all() + + assert len(axes[1]) == 54 + voxels, vertices = axes[1]['ER_FRB08'] + assert voxels.shape == (0, 3) + assert len(vertices) == 2 + assert vertices['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (206 // 2, ) + assert vertices['CIFTI_STRUCTURE_CORTEX_RIGHT'].shape == (206 // 2, ) + check_rewrite(arr, axes) diff --git a/nibabel/cifti2/tests/test_name.py b/nibabel/cifti2/tests/test_name.py new file mode 100644 index 0000000000..a73c5e8c46 --- /dev/null +++ b/nibabel/cifti2/tests/test_name.py @@ -0,0 +1,19 @@ +from nibabel.cifti2 import cifti2_axes + +equivalents = [('CIFTI_STRUCTURE_CORTEX_LEFT', ('CortexLeft', 'LeftCortex', 'left_cortex', 'Left Cortex', + 'Cortex_Left', 'cortex left', 'CORTEX_LEFT', 'LEFT CORTEX', + ('cortex', 'left'), ('CORTEX', 'Left'), ('LEFT', 'coRTEX'))), + ('CIFTI_STRUCTURE_CORTEX', ('Cortex', 'CortexBOTH', 'Cortex_both', 'both cortex', + 'BOTH_CORTEX', 'cortex', 'CORTEX', ('cortex', ), + ('COrtex', 'Both'), ('both', 'cortex')))] + + +def test_name_conversion(): + """ + Tests the automatic name conversion to a format recognized by CIFTI2 + """ + func = cifti2_axes.BrainModel.to_cifti_brain_structure_name + for base_name, input_names in equivalents: + assert base_name == func(base_name) + for name in input_names: + assert base_name == func(name) \ No newline at end of file From 0e7566a1dff794b60cbbefb67bcb791e6bee22f5 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Mon, 25 Jun 2018 14:27:29 +0100 Subject: [PATCH 007/689] Clarified the test filenames distinguishes between io using the raw header or using the new Cifti2 axes --- nibabel/cifti2/tests/test_axes.py | 2 +- nibabel/cifti2/tests/{test_io.py => test_cifti2io_axes.py} | 0 .../cifti2/tests/{test_cifti2io.py => test_cifti2io_header.py} | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename nibabel/cifti2/tests/{test_io.py => test_cifti2io_axes.py} (100%) rename nibabel/cifti2/tests/{test_cifti2io.py => test_cifti2io_header.py} (100%) diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 64e94f2663..d7b24f03ec 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -1,6 +1,6 @@ import numpy as np from nose.tools import assert_raises -from .test_io import check_rewrite +from .test_cifti2io_axes import check_rewrite import nibabel.cifti2.cifti2_axes as axes diff --git a/nibabel/cifti2/tests/test_io.py b/nibabel/cifti2/tests/test_cifti2io_axes.py similarity index 100% rename from nibabel/cifti2/tests/test_io.py rename to nibabel/cifti2/tests/test_cifti2io_axes.py diff --git a/nibabel/cifti2/tests/test_cifti2io.py b/nibabel/cifti2/tests/test_cifti2io_header.py similarity index 100% rename from nibabel/cifti2/tests/test_cifti2io.py rename to nibabel/cifti2/tests/test_cifti2io_header.py From 671c1967c5d2ba56b30fc9a4c831ba7f3183fd3d Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Mon, 25 Jun 2018 14:31:16 +0100 Subject: [PATCH 008/689] BUG: removed failing doctest --- nibabel/cifti2/cifti2_axes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 3125b58404..9f1992b5d6 100644 --- 
a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -537,7 +537,7 @@ class Parcels(Axis):
         (N, ) array with the actual parcels (each of which is a BrainModel object)
 
     Individual parcels can also be accessed based on their name, using
-    >>> parcel = parcel_axis[name]
+    ``parcel = parcel_axis[name]``
     """
     _use_dtype = np.dtype([('name', 'U60'), ('voxels', 'object'), ('vertices', 'object')])
     _affine = None

From 1dd059de2da6593ba25d062ef0b6b527d5084ea1 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Mon, 28 Jan 2019 15:00:38 +0000
Subject: [PATCH 009/689] DOC: explained that Axis is an abstract class

---
 nibabel/cifti2/cifti2_axes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 9f1992b5d6..09faf0e341 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -54,7 +54,7 @@ def to_header(axes):
 
 class Axis(object):
     """
-    Generic object describing the rows or columns of a CIFTI vector/matrix
+    Abstract class for any object describing the rows or columns of a CIFTI vector/matrix
 
     Attributes
     ----------

From 8660afdcc01601ccfe39763c24ce3d9dc428a450 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Thu, 14 Mar 2019 17:38:05 +0000
Subject: [PATCH 010/689] RF: removed the typed array underlying all axes

Also added a few tests to increase coverage
---
 nibabel/cifti2/cifti2_axes.py              | 579 ++++++++++++---------
 nibabel/cifti2/tests/test_axes.py          |  51 +-
 nibabel/cifti2/tests/test_cifti2io_axes.py |  16 +-
 3 files changed, 387 insertions(+), 259 deletions(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 09faf0e341..fc970d5717 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -2,6 +2,7 @@ import numpy as np
 from nibabel.cifti2 import cifti2
 from six import string_types
 from operator import xor
+from abc import ABC, abstractmethod
 
 
 def from_mapping(mim):
@@ -52,55 +53,38 @@ def to_header(axes):
     return cifti2.Cifti2Header(matrix)
 
 
-class Axis(object):
+class Axis(ABC):
     """
     Abstract class for any object describing the rows or columns of a CIFTI vector/matrix
+
+    Mainly used for type checking.
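+
+    Subclasses implement ``__len__``, ``__eq__``, ``__add__`` and
+    ``__getitem__``; see the abstract methods below.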
""" - _use_dtype = None - arr = None - def __init__(self, arr): - self.arr = np.asarray(arr, dtype=self._use_dtype) + @property + def size(self, ): + return len(self) - def get_element(self, index): + @abstractmethod + def __len__(self): + pass + + @abstractmethod + def __eq__(self, other): """ - Extracts a single element from the axis + Compares whether two Axes are equal Parameters ---------- - index : int - Indexes the row/column of interest + other : Axis + other axis to compare to Returns ------- - Description of the row/column + False if the axes don't have the same type or if their content differs """ - return self.arr[index] - - def __getitem__(self, item): - if isinstance(item, int): - return self.get_element(item) - if isinstance(item, string_types): - raise IndexError("Can not index an Axis with a string (except for Parcels)") - return type(self)(self.arr[item]) - - @property - def size(self, ): - return self.arr.size - - def __len__(self): - return self.size - - def __eq__(self, other): - return (type(self) == type(other) and - len(self) == len(other) and - (self.arr == other.arr).all()) + pass + @abstractmethod def __add__(self, other): """ Concatenates two Axes of the same type @@ -114,9 +98,13 @@ def __add__(self, other): ------- Axis of the same subtype as self and other """ - if type(self) == type(other): - return type(self)(np.append(self.arr, other.arr)) - return NotImplemented + pass + + @abstractmethod + def __getitem__(self, item): + """ + Extracts definition of single row/column or new Axis describing a subset of the rows/columns + """ class BrainModel(Axis): @@ -127,45 +115,67 @@ class BrainModel(Axis): Attributes ---------- + name : np.ndarray + (N, ) array with the brain structure objects voxel : np.ndarray (N, 3) array with the voxel indices vertex : np.ndarray (N, ) array with the vertex indices - name : np.ndarray - (N, ) array with the brain structure objects """ - _use_dtype = np.dtype([('vertex', 'i4'), ('voxel', ('i4', 3)), - ('name', 'U%i' % max(len(name) for name in cifti2.CIFTI_BRAIN_STRUCTURES))]) _affine = None _volume_shape = None + _nvertices = None + _name = None - def __init__(self, arr, affine=None, volume_shape=None, nvertices=None): + def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None, nvertices=None): """ Creates a new BrainModel axis defining the vertices and voxels represented by each row/column + A more convenient way to create BrainModel axes is provided by the factory methods: + - `from_mask`: creates a surface or volumetric BrainModel axis from respectively 1D and 3D masks + - `from_surface`: creates a volumetric BrainModel axis + + The resulting BrainModel axes can be concatenated by adding them together. 
+ Parameters ---------- - arr : np.ndarray - (N, ) structured array with for every element a tuple with 3 elements: - - vertex index (-1 for voxels) - - 3 voxel indices (-1 for vertices) - - string (name of brain structure) + name : str or np.ndarray + brain structure name or (N, ) array with the brain structure names + voxel : np.ndarray + (N, 3) array with the voxel indices (can be omitted for CIFTI files only covering the surface) + vertex : np.ndarray + (N, ) array with the vertex indices (can be omitted for volumetric CIFTI files) affine : np.ndarray (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) volume_shape : Tuple[int, int, int] shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) nvertices : dict[String -> int] - maps names of surface elements to integers + maps names of surface elements to integers (not needed for volumetric CIFTI files) """ - super(BrainModel, self).__init__(arr) - self.name = self.name # correct names to CIFTI brain structures + if voxel is None: + if vertex is None: + raise ValueError("Voxel and vertex indices not defined") + nelements = len(vertex) + self.voxel = -np.ones((nelements, 3), dtype=int) + else: + nelements = len(voxel) + self.voxel = np.asarray(voxel, dtype=int) + + self.vertex = -np.ones(nelements, dtype=int) if vertex is None else np.asarray(vertex, dtype=int) + + if isinstance(name, string_types): + name = [self.to_cifti_brain_structure_name(name)] * self.vertex.size + self.name = np.asarray(name, dtype='U') + if nvertices is None: self.nvertices = {} else: self.nvertices = dict(nvertices) + for name in list(self.nvertices.keys()): if name not in self.name: del self.nvertices[name] + if self.is_surface.all(): self.affine = None self.volume_shape = None @@ -173,6 +183,17 @@ def __init__(self, arr, affine=None, volume_shape=None, nvertices=None): self.affine = affine self.volume_shape = volume_shape + if (self.vertex[self.is_surface] < 0).any(): + raise ValueError('Undefined vertex indices found for surface elements') + if (self.voxel[~self.is_surface] < 0).any(): + raise ValueError('Undefined voxel indices found for volumetric elements') + + for check_name in ('name', 'voxel', 'vertex'): + shape = (self.size, 3) if check_name == 'voxel' else (self.size, ) + if getattr(self, check_name).shape != shape: + raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + check_name, getattr(self, check_name).shape)) + @classmethod def from_mapping(cls, mim): """ @@ -187,20 +208,21 @@ def from_mapping(cls, mim): BrainModel """ nbm = np.sum([bm.index_count for bm in mim.brain_models]) - arr = np.zeros(nbm, dtype=cls._use_dtype) - arr['voxel'] = -1 - arr['vertex'] = -1 + voxel = -np.ones((nbm, 3)) + vertex = -np.ones(nbm) + name = [] + nvertices = {} affine, shape = None, None for bm in mim.brain_models: index_end = bm.index_offset + bm.index_count is_surface = bm.model_type == 'CIFTI_MODEL_TYPE_SURFACE' - arr['name'][bm.index_offset: index_end] = bm.brain_structure + name.extend([bm.brain_structure] * bm.index_count) if is_surface: - arr['vertex'][bm.index_offset: index_end] = bm.vertex_indices + vertex[bm.index_offset: index_end] = bm.vertex_indices nvertices[bm.brain_structure] = bm.surface_number_of_vertices else: - arr['voxel'][bm.index_offset: index_end, :] = bm.voxel_indices_ijk + voxel[bm.index_offset: index_end, :] = bm.voxel_indices_ijk if affine is None: shape = mim.volume.volume_dimensions affine = 
mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix @@ -209,7 +231,7 @@ def from_mapping(cls, mim): raise ValueError("All volume masks should be defined in the same volume") if (affine != mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix).any(): raise ValueError("All volume masks should have the same affine") - return cls(arr, affine, shape, nvertices) + return cls(name, voxel, vertex, affine, shape, nvertices) @classmethod def from_mask(cls, mask, name='other', affine=None): @@ -239,11 +261,7 @@ def from_mask(cls, mask, name='other', affine=None): return cls.from_surface(np.where(mask != 0)[0], mask.size, name=name) elif mask.ndim == 3: voxels = np.array(np.where(mask != 0)).T - arr = np.zeros(len(voxels), dtype=cls._use_dtype) - arr['vertex'] = -1 - arr['voxel'] = voxels - arr['name'] = cls.to_cifti_brain_structure_name(name) - return cls(arr, affine=affine, volume_shape=mask.shape) + return cls(name, voxel=voxels, affine=affine, volume_shape=mask.shape) else: raise ValueError("Mask should be either 1-dimensional (for surfaces) or " "3-dimensional (for volumes), not %i-dimensional" % mask.ndim) @@ -266,11 +284,9 @@ def from_surface(cls, vertices, nvertex, name='Cortex'): ------- BrainModel which covers (part of) the surface """ - arr = np.zeros(len(vertices), dtype=cls._use_dtype) - arr['voxel'] = -1 - arr['vertex'] = vertices - arr['name'] = cls.to_cifti_brain_structure_name(name) - return cls(arr, nvertices={arr['name'][0]: nvertex}) + cifti_name = cls.to_cifti_brain_structure_name(name) + return cls(cifti_name, vertex=vertices, + nvertices={cifti_name: nvertex}) def get_element(self, index): """ @@ -288,10 +304,9 @@ def get_element(self, index): - vertex index if it is a surface element, otherwise array with 3 voxel indices - structure.BrainStructure object describing the brain structure the element was taken from """ - elem = self.arr[index] - is_surface = elem['name'] in self.nvertices.keys() + is_surface = self.name[index] in self.nvertices.keys() name = 'vertex' if is_surface else 'voxel' - return is_surface, elem[name], elem['name'] + return is_surface, getattr(self, name)[index], self.name[index] def to_mapping(self, dim): """ @@ -348,6 +363,9 @@ def iter_structures(self, ): @property def affine(self, ): + """ + Affine of the volumetric image in which the greyordinate voxels were defined + """ return self._affine @affine.setter @@ -360,6 +378,9 @@ def affine(self, value): @property def volume_shape(self, ): + """ + Shape of the volumetric image in which the greyordinate voxels were defined + """ return self._volume_shape @volume_shape.setter @@ -368,43 +389,26 @@ def volume_shape(self, value): value = tuple(value) if len(value) != 3: raise ValueError("Volume shape should be a tuple of length 3") + if not all(isinstance(v, int) for v in value): + raise ValueError("All elements of the volume shape should be integers") self._volume_shape = value @property def is_surface(self, ): - """True for any element on the surface - """ - return np.vectorize(lambda name: name in self.nvertices.keys())(self.name) - - @property - def voxel(self, ): - """The voxel represented by each row or column """ - return self.arr['voxel'] - - @voxel.setter - def voxel(self, values): - self.arr['voxel'] = values - - @property - def vertex(self, ): - """The vertex represented by each row or column + (N, ) boolean array which is true for any element on the surface """ - return self.arr['vertex'] - - @vertex.setter - def vertex(self, values): - self.arr['vertex'] = values + return 
np.vectorize(lambda name: name in self.nvertices.keys())(self.name) @property def name(self, ): """The brain structure to which the voxel/vertices of belong """ - return self.arr['name'] + return self._name @name.setter def name(self, values): - self.arr['name'] = [self.to_cifti_brain_structure_name(name) for name in values] + self._name = np.array([self.to_cifti_brain_structure_name(name) for name in values]) @staticmethod def to_cifti_brain_structure_name(name): @@ -474,23 +478,47 @@ def to_cifti_brain_structure_name(name): (name, proposed_name)) return proposed_name + def __len__(self ): + return self.name.size + def __getitem__(self, item): + """ + Extracts part of the brain structure + + Parameters + ---------- + item : anything that can index a 1D array + + Returns + ------- + If `item` is an integer returns a tuple with 3 elements: + - boolean, which is True if it is a surface element + - vertex index if it is a surface element, otherwise array with 3 voxel indices + - structure.BrainStructure object describing the brain structure the element was taken from + + Otherwise returns a new BrainModel + """ if isinstance(item, int): return self.get_element(item) if isinstance(item, string_types): raise IndexError("Can not index an Axis with a string (except for Parcels)") - return type(self)(self.arr[item], self.affine, self.volume_shape, self.nvertices) + return type(self)(self.name[item], self.voxel[item], self.vertex[item], + self.affine, self.volume_shape, self.nvertices) def __eq__(self, other): - if type(self) != type(other) or len(self) != len(other): + if not isinstance(other, BrainModel) or len(self) != len(other): return False if xor(self.affine is None, other.affine is None): return False - return (((self.affine is None and other.affine is None) or + return ( + ((self.affine is None and other.affine is None) or (abs(self.affine - other.affine).max() < 1e-8 and self.volume_shape == other.volume_shape)) and (self.nvertices == other.nvertices) and - (self.arr == other.arr).all()) + (self.name == other.name).all() and + (self.voxel[~self.is_surface] == other.voxel[~other.is_surface]).all() and + (self.vertex[~self.is_surface] == other.vertex[~other.is_surface]).all() + ) def __add__(self, other): """ @@ -505,7 +533,7 @@ def __add__(self, other): ------- BrainModel """ - if type(self) == type(other): + if isinstance(other, BrainModel): if self.affine is None: affine, shape = other.affine, other.volume_shape else: @@ -519,7 +547,12 @@ def __add__(self, other): raise ValueError("Trying to concatenate two BrainModels with inconsistent number of vertices for %s" % name) nvertices[name] = value - return type(self)(np.append(self.arr, other.arr), affine, shape, nvertices) + return type(self)( + np.append(self.name, other.name), + np.concatenate((self.voxel, other.voxel), 0), + np.append(self.vertex, other.vertex), + affine, shape, nvertices + ) return NotImplemented @@ -529,39 +562,34 @@ class Parcels(Axis): This Axis describes which parcel is represented by each row/column. 
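+
+    A short sketch of typical construction (``bm_left`` and ``bm_right`` are
+    assumed to be existing BrainModel axes defined in the same volume)::
+
+        parcel_axis = Parcels.from_brain_models([('left', bm_left), ('right', bm_right)])
+        voxels, vertices = parcel_axis['left']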
- Attributes - ---------- - name : np.ndarray - (N, ) string array with the parcel names - parcel : np.ndarray - (N, ) array with the actual get_parcels (each of which is a BrainModel object) - - Individual get_parcels can also be accessed based on their name, using + Individual parcels can be accessed based on their name, using ``parcel = parcel_axis[name]`` """ - _use_dtype = np.dtype([('name', 'U60'), ('voxels', 'object'), ('vertices', 'object')]) _affine = None _volume_shape = None - def __init__(self, arr, affine=None, volume_shape=None, nvertices=None): + def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ Creates a new BrainModel axis defining the vertices and voxels represented by each row/column Parameters ---------- - arr : np.ndarray - (N, ) structured array with for every element a tuple with 3 elements: - - string (name of parcel) - - (M, 3) int array with the M voxel indices in the parcel - - Dict[String -> (K, ) int array] mapping surface brain structure names to vertex indices + name : np.ndarray + (N, ) string array with the parcel names + voxels : np.ndarray + (N, ) object array each containing a sequence of voxels + vertices : np.ndarray + (N, ) object array each containing a sequence of vertices affine : np.ndarray (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) volume_shape : Tuple[int, int, int] shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) nvertices : dict[String -> int] - maps names of surface elements to integers + maps names of surface elements to integers (not needed for volumetric CIFTI files) """ - super(Parcels, self).__init__(arr) + self.name = np.asarray(name, dtype='U') + self.voxels = np.asarray(voxels, dtype='object') + self.vertices = np.asarray(vertices, dtype='object') self.affine = affine self.volume_shape = volume_shape if nvertices is None: @@ -569,6 +597,11 @@ def __init__(self, arr, affine=None, volume_shape=None, nvertices=None): else: self.nvertices = dict(nvertices) + for check_name in ('name', 'voxels', 'vertices'): + if getattr(self, check_name).shape != (self.size, ): + raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + check_name, getattr(self, check_name).shape)) + @classmethod def from_brain_models(cls, named_brain_models): """ @@ -585,9 +618,13 @@ def from_brain_models(cls, named_brain_models): """ affine = None volume_shape = None - arr = np.zeros(len(named_brain_models), dtype=cls._use_dtype) + all_names = [] + all_voxels = [] + all_vertices = [] nvertices = {} for idx_parcel, (parcel_name, bm) in enumerate(named_brain_models): + all_names.append(parcel_name) + voxels = bm.voxel[~bm.is_surface] if voxels.shape[0] != 0: if affine is None: @@ -597,6 +634,8 @@ def from_brain_models(cls, named_brain_models): if (affine != bm.affine).any() or (volume_shape != bm.volume_shape): raise ValueError( "Can not combine brain models defined in different volumes into a single Parcel axis") + all_voxels.append(voxels) + vertices = {} for name, _, bm_part in bm.iter_structures(): if name in bm.nvertices.keys(): @@ -604,8 +643,8 @@ def from_brain_models(cls, named_brain_models): raise ValueError("Got multiple conflicting number of vertices for surface structure %s" % name) nvertices[name] = bm.nvertices[name] vertices[name] = bm_part.vertex - arr[idx_parcel] = (parcel_name, voxels, vertices) - return Parcels(arr, affine, volume_shape, nvertices) + 
all_vertices.append(vertices) + return Parcels(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) @classmethod def from_mapping(cls, mim): @@ -621,7 +660,10 @@ def from_mapping(cls, mim): Parcels """ nparcels = len(list(mim.parcels)) - arr = np.zeros(nparcels, dtype=cls._use_dtype) + all_names = [] + all_voxels = np.zeros(nparcels, dtype='object') + all_vertices = np.zeros(nparcels, dtype='object') + volume_shape = None if mim.volume is None else mim.volume.volume_dimensions affine = None if mim.volume is None else mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix nvertices = {} @@ -638,10 +680,10 @@ def from_mapping(cls, mim): vertices[vertex.brain_structure] = np.array(vertex) if name not in nvertices.keys(): raise ValueError("Number of vertices for surface structure %s not defined" % name) - arr[idx_parcel]['voxels'] = voxels - arr[idx_parcel]['vertices'] = vertices - arr[idx_parcel]['name'] = parcel.name - return cls(arr, affine, volume_shape, nvertices) + all_voxels[idx_parcel] = voxels + all_vertices[idx_parcel] = vertices + all_names.append(parcel.name) + return cls(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) def to_mapping(self, dim): """ @@ -662,7 +704,7 @@ def to_mapping(self, dim): mim.volume = cifti2.Cifti2Volume(self.volume_shape, affine) for name, nvertex in self.nvertices.items(): mim.append(cifti2.Cifti2Surface(name, nvertex)) - for name, voxels, vertices in self.arr: + for name, voxels, vertices in zip(self.name, self.voxels, self.vertices): cifti_voxels = cifti2.Cifti2VoxelIndicesIJK(voxels) element = cifti2.Cifti2Parcel(name, cifti_voxels) for name, idx_vertices in vertices.items(): @@ -712,31 +754,16 @@ def volume_shape(self, value): raise ValueError("Volume shape should be a tuple of length 3") self._volume_shape = value - @property - def name(self, ): - return self.arr['name'] - - @name.setter - def name(self, values): - self.arr['name'] = values - - @property - def voxels(self, ): - return self.arr['voxels'] - - @voxels.setter - def voxels(self, values): - self.arr['voxels'] = values - - @property - def vertices(self, ): - return self.arr['vertices'] - - @vertices.setter - def vertices(self, values): - self.arr['vertices'] = values + def __len__(self, ): + return self.name.size def __getitem__(self, item): + """ + Extracts subset of the axes based on the type of ``item``: + - `int`: 3-element tuple of (parcel name, parcel voxels, parcel vertices) + - `string`: 2-element tuple of (parcel voxels, parcel vertices + - other object that can index 1D arrays: new Parcel axis + """ if isinstance(item, string_types): idx = np.where(self.name == item)[0] if len(idx) == 0: @@ -746,9 +773,8 @@ def __getitem__(self, item): return self.voxels[idx[0]], self.vertices[idx[0]] if isinstance(item, int): return self.get_element(item) - if isinstance(item, string_types): - raise IndexError("Can not index an Axis with a string (except for Parcels)") - return type(self)(self.arr[item], self.affine, self.volume_shape, self.nvertices) + return type(self)(self.name[item], self.voxels[item], self.vertices[item], + self.affine, self.volume_shape, self.nvertices) def __eq__(self, other): if (type(self) != type(other) or len(self) != len(other) or @@ -797,33 +823,41 @@ def __add__(self, other): raise ValueError("Trying to concatenate two Parcels with inconsistent number of vertices for %s" % name) nvertices[name] = value - return type(self)(np.append(self.arr, other.arr), affine, shape, nvertices) + return type(self)( + 
np.append(self.name, other.name), + np.append(self.voxels, other.voxels), + np.append(self.vertices, other.vertices), + affine, shape, nvertices + ) return NotImplemented class Scalar(Axis): """ Along this axis of the CIFTI vector/matrix each row/column has been given a unique name and optionally metadata - - Attributes - ---------- - name : np.ndarray - (N, ) string array with the parcel names - meta : np.ndarray - (N, ) array with a dictionary of metadata for each row/column """ _use_dtype = np.dtype([('name', 'U60'), ('meta', 'object')]) - def __init__(self, arr): + def __init__(self, name, meta=None): """ Creates a new Scalar axis from (name, meta-data) pairs Parameters ---------- - arr : Iterable[Tuple[str, dict[str -> str]] - iterates over all rows/columns assigning a name and a dictionary of metadata to each + name : np.ndarray + (N, ) string array with the parcel names + meta : np.ndarray + (N, ) object array with a dictionary of metadata for each row/column. Defaults to empty dictionary """ - super(Scalar, self).__init__(arr) + self.name = np.asarray(name, dtype='U') + if meta is None: + meta = [{} for _ in range(self.name.size)] + self.meta = np.asarray(meta, dtype='object') + + for check_name in ('name', 'meta'): + if getattr(self, check_name).shape != (self.size, ): + raise ValueError("Input {} has incorrect shape ({}) for Scalar axis".format( + check_name, getattr(self, check_name).shape)) @classmethod def from_mapping(cls, mim): @@ -838,29 +872,9 @@ def from_mapping(cls, mim): ------- Scalar """ - res = np.zeros(len(list(mim.named_maps)), dtype=cls._use_dtype) - res['name'] = [nm.map_name for nm in mim.named_maps] - res['meta'] = [{} if nm.metadata is None else dict(nm.metadata) for nm in mim.named_maps] - return cls(res) - - @classmethod - def from_names(cls, names): - """ - Creates a new get_scalar axis with the given row/column names - - Parameters - ---------- - names : List[str] - gives a unique name to every row/column in the matrix - - Returns - ------- - Scalar - """ - res = np.zeros(len(names), dtype=cls._use_dtype) - res['name'] = names - res['meta'] = [{} for _ in names] - return cls(res) + names = [nm.map_name for nm in mim.named_maps] + meta = [{} if nm.metadata is None else dict(nm.metadata) for nm in mim.named_maps] + return cls(names, meta) def to_mapping(self, dim): """ @@ -876,9 +890,9 @@ def to_mapping(self, dim): cifti2.Cifti2MatrixIndicesMap """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_SCALARS') - for elem in self.arr: - meta = None if len(elem['meta']) == 0 else elem['meta'] - named_map = cifti2.Cifti2NamedMap(elem['name'], cifti2.Cifti2MetaData(meta)) + for name, meta in zip(self.name, self.meta): + meta = None if len(meta) == 0 else meta + named_map = cifti2.Cifti2NamedMap(name, cifti2.Cifti2MetaData(meta)) mim.append(named_map) return mim @@ -897,7 +911,7 @@ def get_element(self, index): - unicode name of the get_scalar - dictionary with the element metadata """ - return self.arr['name'][index], self.arr['meta'][index] + return self.name[index], self.meta[index] def to_label(self, labels): """ @@ -905,7 +919,7 @@ def to_label(self, labels): Parameters ---------- - labels : list[dict] + labels : list[dict] or dict mapping from integers to (name, (R, G, B, A)), where `name` is a string and R, G, B, and A are floats between 0 and 1 giving the colour and alpha (transparency) @@ -913,56 +927,86 @@ def to_label(self, labels): ------- Label """ - res = np.zeros(self.size, dtype=Label._use_dtype) - res['name'] = self.arr['name'] - 
res['meta'] = self.arr['meta'] - res['get_label'] = labels - return Label(res) + if isinstance(labels, dict): + labels = [dict(labels) for _ in range(self.size)] + return Label(self.name, labels, self.meta) - @property - def name(self, ): - return self.arr['name'] + def __eq__(self, other): + """ + Compares two Scalars - @name.setter - def name(self, values): - self.arr['name'] = values + Parameters + ---------- + other : Scalar + scalar axis to be compared - @property - def meta(self, ): - return self.arr['meta'] + Returns + ------- + bool : False if type, length or content do not match + """ + if not isinstance(other, Scalar) or self.size != other.size: + return False + return (self.name == other.name).all() and (self.meta == other.meta).all() + + def __len__(self, ): + return self.name.size + + def __add__(self, other): + """ + Concatenates two Scalars + + Parameters + ---------- + other : Scalar + scalar axis to be appended to the current one + + Returns + ------- + Scalar + """ + if not isinstance(other, Scalar): + return NotImplemented + return Scalar( + np.append(self.name, other.name), + np.append(self.meta, other.meta), + ) - @meta.setter - def meta(self, values): - self.arr['meta'] = values + def __getitem__(self, item): + if isinstance(item, int): + return self.get_element(item) + return type(self)(self.name[item], self.meta[item]) class Label(Axis): """ Along this axis of the CIFTI vector/matrix each row/column has been given a unique name, get_label table, and optionally metadata - - Attributes - ---------- - name : np.ndarray - (N, ) string array with the parcel names - meta : np.ndarray - (N, ) array with a dictionary of metadata for each row/column - get_label : sp.ndarray - (N, ) array with dictionaries mapping integer values to get_label names and RGBA colors """ - _use_dtype = np.dtype([('name', 'U60'), ('get_label', 'object'), ('meta', 'object')]) - def __init__(self, arr): + def __init__(self, name, label, meta): """ - Creates a new Scalar axis from (name, meta-data) pairs + Creates a new Label axis from (name, meta-data) pairs Parameters ---------- - arr : Iterable[Tuple[str, dict[int -> (str, (float, float, float, float)), dict(str->str)]] - iterates over all rows/columns assigning a name, dictionary mapping integers to get_label names and rgba colours - and a dictionary of metadata to each + name : np.ndarray + (N, ) string array with the parcel names + meta : np.ndarray + (N, ) object array with a dictionary of metadata for each row/column + label : np.ndarray + (N, ) object array with dictionaries mapping integer values to get_label names and RGBA colors """ - super(Label, self).__init__(arr) + self.name = np.asarray(name, dtype='U') + self.meta = np.asarray(meta, dtype='object') + self.label = np.asarray(label, dtype='object') + + for check_name in ('name', 'meta', 'label'): + if getattr(self, check_name).shape != (self.size, ): + raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + check_name, getattr(self, check_name).shape)) + + def __len__(self, ): + return self.name.size @classmethod def from_mapping(cls, mim): @@ -995,12 +1039,13 @@ def to_mapping(self, dim): cifti2.Cifti2MatrixIndicesMap """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_LABELS') - for elem in self.arr: + for name, label, meta in zip(self.name, self.label, self.meta): label_table = cifti2.Cifti2LabelTable() - for key, value in elem['get_label'].items(): + for key, value in label.items(): label_table[key] = (value[0],) + tuple(value[1]) - meta = None 
if len(elem['meta']) == 0 else elem['meta'] - named_map = cifti2.Cifti2NamedMap(elem['name'], cifti2.Cifti2MetaData(meta), + if len(meta) == 0: + meta = None + named_map = cifti2.Cifti2NamedMap(name, cifti2.Cifti2MetaData(meta), label_table) mim.append(named_map) return mim @@ -1021,31 +1066,50 @@ def get_element(self, index): - dictionary with the get_label table - dictionary with the element metadata """ - return self.arr['name'][index], self.arr['get_label'][index], self.arr['meta'][index] + return self.name[index], self.label[index], self.meta[index] - @property - def name(self, ): - return self.arr['name'] + def __add__(self, other): + """ + Concatenates two Labels - @name.setter - def name(self, values): - self.arr['name'] = values + Parameters + ---------- + other : Label + scalar axis to be appended to the current one - @property - def meta(self, ): - return self.arr['meta'] + Returns + ------- + Label + """ + if not isinstance(other, Label): + return NotImplemented + return Label( + np.append(self.name, other.name), + np.append(self.label, other.label), + np.append(self.meta, other.meta), + ) - @meta.setter - def meta(self, values): - self.arr['meta'] = values + def __eq__(self, other): + """ + Compares two Labels - @property - def label(self, ): - return self.arr['get_label'] + Parameters + ---------- + other : Label + label axis to be compared + + Returns + ------- + bool : False if type, length or content do not match + """ + if not isinstance(other, Label) or self.size != other.size: + return False + return (self.name == other.name).all() and (self.meta == other.meta).all() and (self.label == other.label).all() - @label.setter - def label(self, values): - self.arr['get_label'] = values + def __getitem__(self, item): + if isinstance(item, int): + return self.get_element(item) + return type(self)(self.name[item], self.label[item], self.meta[item]) class Series(Axis): @@ -1154,9 +1218,9 @@ def extend(self, other_axis): Series """ if other_axis.step != self.step: - raise ValueError('Can only concatenate get_series with the same step size') + raise ValueError('Can only concatenate Series with the same step size') if other_axis.unit != self.unit: - raise ValueError('Can only concatenate get_series with the same unit') + raise ValueError('Can only concatenate Series with the same unit') return Series(self.start, self.step, self.size + other_axis.size, self.unit) def __getitem__(self, item): @@ -1175,7 +1239,7 @@ def __getitem__(self, item): nelements = (idx_end - idx_start) // step if nelements < 0: nelements = 0 - return Series(idx_start * self.step + self.start, self.step * step, nelements) + return Series(idx_start * self.step + self.start, self.step * step, nelements, self.unit) elif isinstance(item, int): return self.get_element(item) raise IndexError('Series can only be indexed with integers or slices without breaking the regular structure') @@ -1222,3 +1286,18 @@ def __add__(self, other): if isinstance(other, Series): return self.extend(other) return NotImplemented + + def __eq__(self, other): + """ + True if start, step, size, and unit are the same. 
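+
+        Note that ``__add__`` keeps the start of the left-hand Series and
+        ignores the start of the right-hand one, so a Series generally does
+        not compare equal to the tail of a concatenation it contributed to.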
+ """ + return ( + isinstance(other, Series) and + self.start == other.start and + self.step == other.step and + self.size == other.size and + self.unit == other.unit + ) + + def __len__(self): + return self.size diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index d7b24f03ec..ee52cb160a 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -6,6 +6,7 @@ rand_affine = np.random.randn(4, 4) vol_shape = (5, 10, 3) +use_label = {0: ('something', (0.2, 0.4, 0.1, 0.5)), 1: ('even better', (0.3, 0.8, 0.43, 0.9))} def get_brain_models(): @@ -52,7 +53,7 @@ def get_scalar(): ------- Scalar axis """ - return axes.Scalar.from_names(['one', 'two', 'three']) + return axes.Scalar(['one', 'two', 'three']) def get_label(): @@ -63,8 +64,8 @@ def get_label(): ------- Label axis """ - return axes.Scalar.from_names(['one', 'two', 'three']).to_label({0: ('something', (0.2, 0.4, 0.1, 0.5)), - 1: ('even better', (0.3, 0.8, 0.43, 0.9))}) + return axes.Scalar(['one', 'two', 'three']).to_label(use_label) + def get_series(): """ @@ -190,14 +191,36 @@ def test_scalar(): assert len(sc) == 3 assert isinstance(sc, axes.Scalar) assert (sc.name == ['one', 'two', 'three']).all() + assert (sc.meta == [{}] * 3).all() assert sc[1] == ('two', {}) sc2 = sc + sc assert len(sc2) == 6 assert (sc2.name == ['one', 'two', 'three', 'one', 'two', 'three']).all() + assert (sc2.meta == [{}] * 6).all() assert sc2[:3] == sc assert sc2[3:] == sc +def test_label(): + """ + Test the introspection and creation of CIFTI2 Scalar axes + """ + lab = get_label() + assert len(lab) == 3 + assert isinstance(lab, axes.Label) + assert (lab.name == ['one', 'two', 'three']).all() + assert (lab.meta == [{}] * 3).all() + assert (lab.label == [use_label] * 3).all() + assert lab[1] == ('two', use_label, {}) + lab2 = lab + lab + assert len(lab2) == 6 + assert (lab2.name == ['one', 'two', 'three', 'one', 'two', 'three']).all() + assert (lab2.meta == [{}] * 6).all() + assert (lab2.label == [use_label] * 6).all() + assert lab2[:3] == lab + assert lab2[3:] == lab + + def test_series(): """ Test the introspection and creation of CIFTI2 Series axes @@ -243,3 +266,25 @@ def test_writing(): for ax2 in get_axes(): arr = np.random.randn(len(ax1), len(ax2)) check_rewrite(arr, (ax1, ax2)) + + +def test_common_interface(): + """ + Tests the common interface for all custom created CIFTI2 axes + """ + for axis1, axis2 in zip(get_axes(), get_axes()): + assert axis1 == axis2 + concatenated = axis1 + axis2 + assert axis1 != concatenated + print(type(axis1)) + if isinstance(axis1, axes.Series): + print(concatenated.start, axis1.start) + print(concatenated[:axis1.size].start, axis1.start) + assert axis1 == concatenated[:axis1.size] + if isinstance(axis1, axes.Series): + assert axis2 != concatenated[axis1.size:] + else: + assert axis2 == concatenated[axis1.size:] + + assert len(axis1) == axis1.size + diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index 22f4c27253..4a3aa8d111 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -42,17 +42,21 @@ def check_hcp_grayordinates(brain_model): assert bm.volume_shape == (91, 109, 91) assert name == cifti2_axes.BrainModel.to_cifti_brain_structure_name(label) assert len(bm) == nel - assert (bm.arr == brain_model.arr[idx_start:idx_start + nel]).all() + assert (bm.name == brain_model.name[idx_start:idx_start + nel]).all() + assert (bm.voxel == 
brain_model.voxel[idx_start:idx_start + nel]).all() + assert (bm.vertex == brain_model.vertex[idx_start:idx_start + nel]).all() idx_start += nel assert idx_start == len(brain_model) - assert (brain_model.arr[:5]['vertex'] == np.arange(5)).all() + assert (brain_model.vertex[:5] == np.arange(5)).all() assert structures[0][2].vertex[-1] == 32491 assert structures[1][2].vertex[0] == 0 assert structures[1][2].vertex[-1] == 32491 - assert (structures[-1][2].arr[-1] == brain_model.arr[-1]).all() - assert (brain_model.arr[-1]['voxel'] == [38, 55, 46]).all() - assert (brain_model.arr[70000]['voxel'] == [56, 22, 19]).all() + assert structures[-1][2].name[-1] == brain_model.name[-1] + assert (structures[-1][2].voxel[-1] == brain_model.voxel[-1]).all() + assert structures[-1][2].vertex[-1] == brain_model.vertex[-1] + assert (brain_model.voxel[-1] == [38, 55, 46]).all() + assert (brain_model.voxel[70000] == [56, 22, 19]).all() def check_Conte69(brain_model): @@ -67,7 +71,7 @@ def check_Conte69(brain_model): assert structures[1][2].is_surface.all() assert (brain_model.voxel == -1).all() - assert (brain_model.arr[:5]['vertex'] == np.arange(5)).all() + assert (brain_model.vertex[:5] == np.arange(5)).all() assert structures[0][2].vertex[-1] == 32491 assert structures[1][2].vertex[0] == 0 assert structures[1][2].vertex[-1] == 32491 From c2274c52967c80c4d1a93dbc4eedbadcf9271b7a Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 14 Mar 2019 17:56:10 +0000 Subject: [PATCH 011/689] RF: increased consistency of methods in the Axis objects - removed last reference of `arr` (replace with `time`) - removed spurious print statements in test --- nibabel/cifti2/cifti2_axes.py | 528 +++++++++++---------- nibabel/cifti2/tests/test_axes.py | 38 +- nibabel/cifti2/tests/test_cifti2io_axes.py | 4 +- 3 files changed, 285 insertions(+), 285 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index fc970d5717..e99507052a 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -122,10 +122,6 @@ class BrainModel(Axis): vertex : np.ndarray (N, ) array with the vertex indices """ - _affine = None - _volume_shape = None - _nvertices = None - _name = None def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None, nvertices=None): """ @@ -194,45 +190,6 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( check_name, getattr(self, check_name).shape)) - @classmethod - def from_mapping(cls, mim): - """ - Creates a new BrainModel axis based on a CIFTI dataset - - Parameters - ---------- - mim : cifti2.Cifti2MatrixIndicesMap - - Returns - ------- - BrainModel - """ - nbm = np.sum([bm.index_count for bm in mim.brain_models]) - voxel = -np.ones((nbm, 3)) - vertex = -np.ones(nbm) - name = [] - - nvertices = {} - affine, shape = None, None - for bm in mim.brain_models: - index_end = bm.index_offset + bm.index_count - is_surface = bm.model_type == 'CIFTI_MODEL_TYPE_SURFACE' - name.extend([bm.brain_structure] * bm.index_count) - if is_surface: - vertex[bm.index_offset: index_end] = bm.vertex_indices - nvertices[bm.brain_structure] = bm.surface_number_of_vertices - else: - voxel[bm.index_offset: index_end, :] = bm.voxel_indices_ijk - if affine is None: - shape = mim.volume.volume_dimensions - affine = mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix - else: - if shape != mim.volume.volume_dimensions: - raise ValueError("All volume 
masks should be defined in the same volume") - if (affine != mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix).any(): - raise ValueError("All volume masks should have the same affine") - return cls(name, voxel, vertex, affine, shape, nvertices) - @classmethod def from_mask(cls, mask, name='other', affine=None): """ @@ -288,25 +245,44 @@ def from_surface(cls, vertices, nvertex, name='Cortex'): return cls(cifti_name, vertex=vertices, nvertices={cifti_name: nvertex}) - def get_element(self, index): + @classmethod + def from_mapping(cls, mim): """ - Describes a single element from the axis + Creates a new BrainModel axis based on a CIFTI dataset Parameters ---------- - index : int - Indexes the row/column of interest + mim : cifti2.Cifti2MatrixIndicesMap Returns ------- - tuple with 3 elements - - boolean, which is True if it is a surface element - - vertex index if it is a surface element, otherwise array with 3 voxel indices - - structure.BrainStructure object describing the brain structure the element was taken from + BrainModel """ - is_surface = self.name[index] in self.nvertices.keys() - name = 'vertex' if is_surface else 'voxel' - return is_surface, getattr(self, name)[index], self.name[index] + nbm = np.sum([bm.index_count for bm in mim.brain_models]) + voxel = -np.ones((nbm, 3)) + vertex = -np.ones(nbm) + name = [] + + nvertices = {} + affine, shape = None, None + for bm in mim.brain_models: + index_end = bm.index_offset + bm.index_count + is_surface = bm.model_type == 'CIFTI_MODEL_TYPE_SURFACE' + name.extend([bm.brain_structure] * bm.index_count) + if is_surface: + vertex[bm.index_offset: index_end] = bm.vertex_indices + nvertices[bm.brain_structure] = bm.surface_number_of_vertices + else: + voxel[bm.index_offset: index_end, :] = bm.voxel_indices_ijk + if affine is None: + shape = mim.volume.volume_dimensions + affine = mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix + else: + if shape != mim.volume.volume_dimensions: + raise ValueError("All volume masks should be defined in the same volume") + if (affine != mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix).any(): + raise ValueError("All volume masks should have the same affine") + return cls(name, voxel, vertex, affine, shape, nvertices) def to_mapping(self, dim): """ @@ -361,55 +337,6 @@ def iter_structures(self, ): start_name = self.name[idx_start] yield start_name, slice(idx_start, None), self[idx_start:] - @property - def affine(self, ): - """ - Affine of the volumetric image in which the greyordinate voxels were defined - """ - return self._affine - - @affine.setter - def affine(self, value): - if value is not None: - value = np.asarray(value) - if value.shape != (4, 4): - raise ValueError('Affine transformation should be a 4x4 array') - self._affine = value - - @property - def volume_shape(self, ): - """ - Shape of the volumetric image in which the greyordinate voxels were defined - """ - return self._volume_shape - - @volume_shape.setter - def volume_shape(self, value): - if value is not None: - value = tuple(value) - if len(value) != 3: - raise ValueError("Volume shape should be a tuple of length 3") - if not all(isinstance(v, int) for v in value): - raise ValueError("All elements of the volume shape should be integers") - self._volume_shape = value - - @property - def is_surface(self, ): - """ - (N, ) boolean array which is true for any element on the surface - """ - return np.vectorize(lambda name: name in self.nvertices.keys())(self.name) - - @property - def name(self, ): - 
"""The brain structure to which the voxel/vertices of belong - """ - return self._name - - @name.setter - def name(self, values): - self._name = np.array([self.to_cifti_brain_structure_name(name) for name in values]) - @staticmethod def to_cifti_brain_structure_name(name): """ @@ -478,32 +405,63 @@ def to_cifti_brain_structure_name(name): (name, proposed_name)) return proposed_name - def __len__(self ): - return self.name.size + @property + def is_surface(self, ): + """ + (N, ) boolean array which is true for any element on the surface + """ + return np.vectorize(lambda name: name in self.nvertices.keys())(self.name) - def __getitem__(self, item): + _affine = None + + @property + def affine(self, ): """ - Extracts part of the brain structure + Affine of the volumetric image in which the greyordinate voxels were defined + """ + return self._affine - Parameters - ---------- - item : anything that can index a 1D array + @affine.setter + def affine(self, value): + if value is not None: + value = np.asarray(value) + if value.shape != (4, 4): + raise ValueError('Affine transformation should be a 4x4 array') + self._affine = value - Returns - ------- - If `item` is an integer returns a tuple with 3 elements: - - boolean, which is True if it is a surface element - - vertex index if it is a surface element, otherwise array with 3 voxel indices - - structure.BrainStructure object describing the brain structure the element was taken from + _volume_shape = None - Otherwise returns a new BrainModel + @property + def volume_shape(self, ): """ - if isinstance(item, int): - return self.get_element(item) - if isinstance(item, string_types): - raise IndexError("Can not index an Axis with a string (except for Parcels)") - return type(self)(self.name[item], self.voxel[item], self.vertex[item], - self.affine, self.volume_shape, self.nvertices) + Shape of the volumetric image in which the greyordinate voxels were defined + """ + return self._volume_shape + + @volume_shape.setter + def volume_shape(self, value): + if value is not None: + value = tuple(value) + if len(value) != 3: + raise ValueError("Volume shape should be a tuple of length 3") + if not all(isinstance(v, int) for v in value): + raise ValueError("All elements of the volume shape should be integers") + self._volume_shape = value + + _name = None + + @property + def name(self, ): + """The brain structure to which the voxel/vertices of belong + """ + return self._name + + @name.setter + def name(self, values): + self._name = np.array([self.to_cifti_brain_structure_name(name) for name in values]) + + def __len__(self ): + return self.name.size def __eq__(self, other): if not isinstance(other, BrainModel) or len(self) != len(other): @@ -555,6 +513,50 @@ def __add__(self, other): ) return NotImplemented + def __getitem__(self, item): + """ + Extracts part of the brain structure + + Parameters + ---------- + item : anything that can index a 1D array + + Returns + ------- + If `item` is an integer returns a tuple with 3 elements: + - boolean, which is True if it is a surface element + - vertex index if it is a surface element, otherwise array with 3 voxel indices + - structure.BrainStructure object describing the brain structure the element was taken from + + Otherwise returns a new BrainModel + """ + if isinstance(item, int): + return self.get_element(item) + if isinstance(item, string_types): + raise IndexError("Can not index an Axis with a string (except for Parcels)") + return type(self)(self.name[item], self.voxel[item], self.vertex[item], + 
self.affine, self.volume_shape, self.nvertices) + + def get_element(self, index): + """ + Describes a single element from the axis + + Parameters + ---------- + index : int + Indexes the row/column of interest + + Returns + ------- + tuple with 3 elements + - boolean, which is True if it is a surface element + - vertex index if it is a surface element, otherwise array with 3 voxel indices + - structure.BrainStructure object describing the brain structure the element was taken from + """ + is_surface = self.name[index] in self.nvertices.keys() + name = 'vertex' if is_surface else 'voxel' + return is_surface, getattr(self, name)[index], self.name[index] + class Parcels(Axis): """ @@ -565,8 +567,6 @@ class Parcels(Axis): Individual parcels can be accessed based on their name, using ``parcel = parcel_axis[name]`` """ - _affine = None - _volume_shape = None def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ @@ -633,7 +633,7 @@ def from_brain_models(cls, named_brain_models): else: if (affine != bm.affine).any() or (volume_shape != bm.volume_shape): raise ValueError( - "Can not combine brain models defined in different volumes into a single Parcel axis") + "Can not combine brain models defined in different volumes into a single Parcel axis") all_voxels.append(voxels) vertices = {} @@ -712,23 +712,7 @@ def to_mapping(self, dim): mim.append(element) return mim - def get_element(self, index): - """ - Describes a single element from the axis - - Parameters - ---------- - index : int - Indexes the row/column of interest - - Returns - ------- - tuple with 3 elements - - unicode name of the parcel - - (M, 3) int array with voxel indices - - Dict[String -> (K, ) int array] with vertex indices for a specific surface brain structure - """ - return self.name[index], self.voxels[index], self.vertices[index] + _affine = None @property def affine(self, ): @@ -742,6 +726,8 @@ def affine(self, value): raise ValueError('Affine transformation should be a 4x4 array') self._affine = value + _volume_shape = None + @property def volume_shape(self, ): return self._volume_shape @@ -757,25 +743,6 @@ def volume_shape(self, value): def __len__(self, ): return self.name.size - def __getitem__(self, item): - """ - Extracts subset of the axes based on the type of ``item``: - - `int`: 3-element tuple of (parcel name, parcel voxels, parcel vertices) - - `string`: 2-element tuple of (parcel voxels, parcel vertices - - other object that can index 1D arrays: new Parcel axis - """ - if isinstance(item, string_types): - idx = np.where(self.name == item)[0] - if len(idx) == 0: - raise IndexError("Parcel %s not found" % item) - if len(idx) > 1: - raise IndexError("Multiple get_parcels with name %s found" % item) - return self.voxels[idx[0]], self.vertices[idx[0]] - if isinstance(item, int): - return self.get_element(item) - return type(self)(self.name[item], self.voxels[item], self.vertices[item], - self.affine, self.volume_shape, self.nvertices) - def __eq__(self, other): if (type(self) != type(other) or len(self) != len(other) or (self.name != other.name).all() or self.nvertices != other.nvertices or @@ -783,8 +750,8 @@ def __eq__(self, other): return False if self.affine is not None: if ( other.affine is None or - abs(self.affine - other.affine).max() > 1e-8 or - self.volume_shape != other.volume_shape): + abs(self.affine - other.affine).max() > 1e-8 or + self.volume_shape != other.volume_shape): return False elif other.affine is not None: return False @@ -815,7 +782,7 @@ def 
__add__(self, other): else: affine, shape = self.affine, self.volume_shape if other.affine is not None and ((other.affine != affine).all() or - other.volume_shape != shape): + other.volume_shape != shape): raise ValueError("Trying to concatenate two Parcels defined in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): @@ -831,12 +798,48 @@ def __add__(self, other): ) return NotImplemented + def __getitem__(self, item): + """ + Extracts subset of the axes based on the type of ``item``: + - `int`: 3-element tuple of (parcel name, parcel voxels, parcel vertices) + - `string`: 2-element tuple of (parcel voxels, parcel vertices + - other object that can index 1D arrays: new Parcel axis + """ + if isinstance(item, string_types): + idx = np.where(self.name == item)[0] + if len(idx) == 0: + raise IndexError("Parcel %s not found" % item) + if len(idx) > 1: + raise IndexError("Multiple get_parcels with name %s found" % item) + return self.voxels[idx[0]], self.vertices[idx[0]] + if isinstance(item, int): + return self.get_element(item) + return type(self)(self.name[item], self.voxels[item], self.vertices[item], + self.affine, self.volume_shape, self.nvertices) + + def get_element(self, index): + """ + Describes a single element from the axis + + Parameters + ---------- + index : int + Indexes the row/column of interest + + Returns + ------- + tuple with 3 elements + - unicode name of the parcel + - (M, 3) int array with voxel indices + - Dict[String -> (K, ) int array] with vertex indices for a specific surface brain structure + """ + return self.name[index], self.voxels[index], self.vertices[index] + class Scalar(Axis): """ Along this axis of the CIFTI vector/matrix each row/column has been given a unique name and optionally metadata """ - _use_dtype = np.dtype([('name', 'U60'), ('meta', 'object')]) def __init__(self, name, meta=None): """ @@ -896,23 +899,6 @@ def to_mapping(self, dim): mim.append(named_map) return mim - def get_element(self, index): - """ - Describes a single element from the axis - - Parameters - ---------- - index : int - Indexes the row/column of interest - - Returns - ------- - tuple with 2 elements - - unicode name of the get_scalar - - dictionary with the element metadata - """ - return self.name[index], self.meta[index] - def to_label(self, labels): """ Creates a new Label axis based on the Scalar axis @@ -931,6 +917,9 @@ def to_label(self, labels): labels = [dict(labels) for _ in range(self.size)] return Label(self.name, labels, self.meta) + def __len__(self, ): + return self.name.size + def __eq__(self, other): """ Compares two Scalars @@ -948,9 +937,6 @@ def __eq__(self, other): return False return (self.name == other.name).all() and (self.meta == other.meta).all() - def __len__(self, ): - return self.name.size - def __add__(self, other): """ Concatenates two Scalars @@ -976,6 +962,23 @@ def __getitem__(self, item): return self.get_element(item) return type(self)(self.name[item], self.meta[item]) + def get_element(self, index): + """ + Describes a single element from the axis + + Parameters + ---------- + index : int + Indexes the row/column of interest + + Returns + ------- + tuple with 2 elements + - unicode name of the get_scalar + - dictionary with the element metadata + """ + return self.name[index], self.meta[index] + class Label(Axis): """ @@ -1005,9 +1008,6 @@ def __init__(self, name, label, meta): raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( check_name, getattr(self, 
check_name).shape)) - def __len__(self, ): - return self.name.size - @classmethod def from_mapping(cls, mim): """ @@ -1050,23 +1050,25 @@ def to_mapping(self, dim): mim.append(named_map) return mim - def get_element(self, index): + def __len__(self, ): + return self.name.size + + def __eq__(self, other): """ - Describes a single element from the axis + Compares two Labels Parameters ---------- - index : int - Indexes the row/column of interest + other : Label + label axis to be compared Returns ------- - tuple with 2 elements - - unicode name of the get_scalar - - dictionary with the get_label table - - dictionary with the element metadata + bool : False if type, length or content do not match """ - return self.name[index], self.label[index], self.meta[index] + if not isinstance(other, Label) or self.size != other.size: + return False + return (self.name == other.name).all() and (self.meta == other.meta).all() and (self.label == other.label).all() def __add__(self, other): """ @@ -1089,27 +1091,28 @@ def __add__(self, other): np.append(self.meta, other.meta), ) - def __eq__(self, other): + def __getitem__(self, item): + if isinstance(item, int): + return self.get_element(item) + return type(self)(self.name[item], self.label[item], self.meta[item]) + + def get_element(self, index): """ - Compares two Labels + Describes a single element from the axis Parameters ---------- - other : Label - label axis to be compared + index : int + Indexes the row/column of interest Returns ------- - bool : False if type, length or content do not match + tuple with 2 elements + - unicode name of the get_scalar + - dictionary with the get_label table + - dictionary with the element metadata """ - if not isinstance(other, Label) or self.size != other.size: - return False - return (self.name == other.name).all() and (self.meta == other.meta).all() and (self.label == other.label).all() - - def __getitem__(self, item): - if isinstance(item, int): - return self.get_element(item) - return type(self)(self.name[item], self.label[item], self.meta[item]) + return self.name[index], self.label[index], self.meta[index] class Series(Axis): @@ -1128,7 +1131,6 @@ class Series(Axis): number of time points """ size = None - _unit = None def __init__(self, start, step, size, unit="SECOND"): """ @@ -1151,17 +1153,7 @@ def __init__(self, start, step, size, unit="SECOND"): self.size = size @property - def unit(self, ): - return self._unit - - @unit.setter - def unit(self, value): - if value.upper() not in ("SECOND", "HERTZ", "METER", "RADIAN"): - raise ValueError("Series unit should be one of ('second', 'hertz', 'meter', or 'radian'") - self._unit = value.upper() - - @property - def arr(self, ): + def time(self, ): return np.arange(self.size) * self.step + self.start @classmethod @@ -1202,6 +1194,18 @@ def to_mapping(self, dim): mim.series_unit = self.unit return mim + _unit = None + + @property + def unit(self, ): + return self._unit + + @unit.setter + def unit(self, value): + if value.upper() not in ("SECOND", "HERTZ", "METER", "RADIAN"): + raise ValueError("Series unit should be one of ('second', 'hertz', 'meter', or 'radian'") + self._unit = value.upper() + def extend(self, other_axis): """ Concatenates two get_series @@ -1223,6 +1227,45 @@ def extend(self, other_axis): raise ValueError('Can only concatenate Series with the same unit') return Series(self.start, self.step, self.size + other_axis.size, self.unit) + def __len__(self): + return self.size + + def __eq__(self, other): + """ + True if start, step, size, and unit are 
the same. + """ + return ( + isinstance(other, Series) and + self.start == other.start and + self.step == other.step and + self.size == other.size and + self.unit == other.unit + ) + + def __add__(self, other): + """ + Concatenates two Series + + Parameters + ---------- + other : Series + Time get_series to append at the end of the current time get_series. + Note that the starting time of the other time get_series is ignored. + + Returns + ------- + Series + New time get_series with the concatenation of the two + + Raises + ------ + ValueError + raised if the repetition time of the two time get_series is different + """ + if isinstance(other, Series): + return self.extend(other) + return NotImplemented + def __getitem__(self, item): if isinstance(item, slice): step = 1 if item.step is None else item.step @@ -1262,42 +1305,3 @@ def get_element(self, index): if index >= self.size: raise IndexError("index %i is out of range for get_series with size %i" % (index, self.size)) return self.start + self.step * index - - def __add__(self, other): - """ - Concatenates two Series - - Parameters - ---------- - other : Series - Time get_series to append at the end of the current time get_series. - Note that the starting time of the other time get_series is ignored. - - Returns - ------- - Series - New time get_series with the concatenation of the two - - Raises - ------ - ValueError - raised if the repetition time of the two time get_series is different - """ - if isinstance(other, Series): - return self.extend(other) - return NotImplemented - - def __eq__(self, other): - """ - True if start, step, size, and unit are the same. - """ - return ( - isinstance(other, Series) and - self.start == other.start and - self.step == other.step and - self.size == other.size and - self.unit == other.unit - ) - - def __len__(self): - return self.size diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index ee52cb160a..eccdf7aecc 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -231,14 +231,14 @@ def test_series(): assert sr[2].unit == 'SECOND' assert sr[3].unit == 'HERTZ' - assert (sr[0].arr == np.arange(4) * 10 + 3).all() - assert (sr[1].arr == np.arange(3) * 10 + 8).all() - assert (sr[2].arr == np.arange(4) * 2 + 3).all() - assert ((sr[0] + sr[1]).arr == np.arange(7) * 10 + 3).all() - assert ((sr[1] + sr[0]).arr == np.arange(7) * 10 + 8).all() - assert ((sr[1] + sr[0] + sr[0]).arr == np.arange(11) * 10 + 8).all() + assert (sr[0].time == np.arange(4) * 10 + 3).all() + assert (sr[1].time == np.arange(3) * 10 + 8).all() + assert (sr[2].time == np.arange(4) * 2 + 3).all() + assert ((sr[0] + sr[1]).time == np.arange(7) * 10 + 3).all() + assert ((sr[1] + sr[0]).time == np.arange(7) * 10 + 8).all() + assert ((sr[1] + sr[0] + sr[0]).time == np.arange(11) * 10 + 8).all() assert sr[1][2] == 28 - assert sr[1][-2] == sr[1].arr[-2] + assert sr[1][-2] == sr[1].time[-2] assert_raises(ValueError, lambda: sr[0] + sr[2]) assert_raises(ValueError, lambda: sr[2] + sr[1]) assert_raises(ValueError, lambda: sr[0] + sr[3]) @@ -246,16 +246,16 @@ def test_series(): assert_raises(ValueError, lambda: sr[3] + sr[2]) # test slicing - assert (sr[0][1:3].arr == sr[0].arr[1:3]).all() - assert (sr[0][1:].arr == sr[0].arr[1:]).all() - assert (sr[0][:-2].arr == sr[0].arr[:-2]).all() - assert (sr[0][1:-1].arr == sr[0].arr[1:-1]).all() - assert (sr[0][1:-1:2].arr == sr[0].arr[1:-1:2]).all() - assert (sr[0][::2].arr == sr[0].arr[::2]).all() - assert (sr[0][:10:2].arr == 
sr[0].arr[::2]).all() - assert (sr[0][10::-1].arr == sr[0].arr[::-1]).all() - assert (sr[0][3:1:-1].arr == sr[0].arr[3:1:-1]).all() - assert (sr[0][1:3:-1].arr == sr[0].arr[1:3:-1]).all() + assert (sr[0][1:3].time == sr[0].time[1:3]).all() + assert (sr[0][1:].time == sr[0].time[1:]).all() + assert (sr[0][:-2].time == sr[0].time[:-2]).all() + assert (sr[0][1:-1].time == sr[0].time[1:-1]).all() + assert (sr[0][1:-1:2].time == sr[0].time[1:-1:2]).all() + assert (sr[0][::2].time == sr[0].time[::2]).all() + assert (sr[0][:10:2].time == sr[0].time[::2]).all() + assert (sr[0][10::-1].time == sr[0].time[::-1]).all() + assert (sr[0][3:1:-1].time == sr[0].time[3:1:-1]).all() + assert (sr[0][1:3:-1].time == sr[0].time[1:3:-1]).all() def test_writing(): @@ -276,10 +276,6 @@ def test_common_interface(): assert axis1 == axis2 concatenated = axis1 + axis2 assert axis1 != concatenated - print(type(axis1)) - if isinstance(axis1, axes.Series): - print(concatenated.start, axis1.start) - print(concatenated[:axis1.size].start, axis1.start) assert axis1 == concatenated[:axis1.size] if isinstance(axis1, axes.Series): assert axis2 != concatenated[axis1.size:] diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index 4a3aa8d111..fee3605ce4 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -139,7 +139,7 @@ def test_read_conte69_dtseries(): assert axes[0].start == 0 assert axes[0].step == 1 assert axes[0].size == arr.shape[0] - assert (axes[0].arr == [0, 1]).all() + assert (axes[0].time == [0, 1]).all() check_Conte69(axes[1]) check_rewrite(arr, axes) @@ -169,7 +169,7 @@ def test_read_conte69_ptseries(): assert axes[0].start == 0 assert axes[0].step == 1 assert axes[0].size == arr.shape[0] - assert (axes[0].arr == [0, 1]).all() + assert (axes[0].time == [0, 1]).all() assert len(axes[1]) == 54 voxels, vertices = axes[1]['ER_FRB08'] From 52aff8ae37161685cba4483619c387e804b01ef1 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 14 Mar 2019 18:06:32 +0000 Subject: [PATCH 012/689] ENH: made it easier to create Label axis from constructor directly Removed Scalar.to_label, because it was no longer needed --- nibabel/cifti2/cifti2_axes.py | 34 ++++++++++--------------------- nibabel/cifti2/tests/test_axes.py | 2 +- 2 files changed, 12 insertions(+), 24 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e99507052a..ec8203b6ea 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -899,24 +899,6 @@ def to_mapping(self, dim): mim.append(named_map) return mim - def to_label(self, labels): - """ - Creates a new Label axis based on the Scalar axis - - Parameters - ---------- - labels : list[dict] or dict - mapping from integers to (name, (R, G, B, A)), where `name` is a string and R, G, B, and A are floats - between 0 and 1 giving the colour and alpha (transparency) - - Returns - ------- - Label - """ - if isinstance(labels, dict): - labels = [dict(labels) for _ in range(self.size)] - return Label(self.name, labels, self.meta) - def __len__(self, ): return self.name.size @@ -986,7 +968,7 @@ class Label(Axis): get_label table, and optionally metadata """ - def __init__(self, name, label, meta): + def __init__(self, name, label, meta=None): """ Creates a new Label axis from (name, meta-data) pairs @@ -994,14 +976,19 @@ def __init__(self, name, label, meta): ---------- name : np.ndarray (N, ) string array with the parcel names + label : np.ndarray + 
single dictionary or (N, ) object array with dictionaries mapping from integers to (name, (R, G, B, A)), + where name is a string and R, G, B, and A are floats between 0 and 1 giving the colour and alpha meta : np.ndarray (N, ) object array with a dictionary of metadata for each row/column - label : np.ndarray - (N, ) object array with dictionaries mapping integer values to get_label names and RGBA colors """ self.name = np.asarray(name, dtype='U') - self.meta = np.asarray(meta, dtype='object') + if isinstance(label, dict): + label = [label] * self.name.size self.label = np.asarray(label, dtype='object') + if meta is None: + meta = [{} for _ in range(self.name.size)] + self.meta = np.asarray(meta, dtype='object') for check_name in ('name', 'meta', 'label'): if getattr(self, check_name).shape != (self.size, ): @@ -1023,7 +1010,8 @@ def from_mapping(cls, mim): """ tables = [{key: (value.label, value.rgba) for key, value in nm.label_table.items()} for nm in mim.named_maps] - return Scalar.from_mapping(mim).to_label(tables) + rest = Scalar.from_mapping(mim) + return Label(rest.name, tables, rest.meta) def to_mapping(self, dim): """ diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index eccdf7aecc..4f7c9f1dea 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -64,7 +64,7 @@ def get_label(): ------- Label axis """ - return axes.Scalar(['one', 'two', 'three']).to_label(use_label) + return axes.Label(['one', 'two', 'three'], use_label) def get_series(): From dad74eee9c0fb3d5dcddc89d1d9312d684f9d06e Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 14 Mar 2019 18:27:26 +0000 Subject: [PATCH 013/689] REF: made flake8 happy --- nibabel/cifti2/cifti2_axes.py | 119 ++++++++++++++++++++++------------ 1 file changed, 78 insertions(+), 41 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index ec8203b6ea..97946e22cf 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -123,12 +123,14 @@ class BrainModel(Axis): (N, ) array with the vertex indices """ - def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None, nvertices=None): + def __init__(self, name, voxel=None, vertex=None, affine=None, + volume_shape=None, nvertices=None): """ - Creates a new BrainModel axis defining the vertices and voxels represented by each row/column + Creates a BrainModel axis defining the vertices and voxels represented by each row/column A more convenient way to create BrainModel axes is provided by the factory methods: - - `from_mask`: creates a surface or volumetric BrainModel axis from respectively 1D and 3D masks + - `from_mask`: creates surface or volumetric BrainModel axis from respectively + 1D or 3D masks - `from_surface`: creates a volumetric BrainModel axis The resulting BrainModel axes can be concatenated by adding them together. 
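
As a concrete illustration of the factory methods mentioned above, a minimal
sketch (the module path nibabel.cifti2.cifti2_axes, the toy mask, and the
100-vertex mesh are assumptions for illustration; the call signatures follow
the definitions shown elsewhere in this patch series):

    import numpy as np
    from nibabel.cifti2 import cifti2_axes

    # Volumetric part: every nonzero voxel of the 3D mask becomes one
    # row/column; 'thalamus_left' is normalized to
    # CIFTI_STRUCTURE_THALAMUS_LEFT by to_cifti_brain_structure_name.
    mask = np.zeros((4, 4, 4))
    mask[1, 2, 3] = 1
    vol = cifti2_axes.BrainModel.from_mask(mask, name='thalamus_left',
                                           affine=np.eye(4))

    # Surface part: the first 10 vertices of a 100-vertex left cortical mesh.
    surf = cifti2_axes.BrainModel.from_surface(np.arange(10), 100,
                                               name='cortex_left')

    # BrainModel axes concatenate with +; the affine and volume shape are
    # taken from whichever operand defines them.
    combined = surf + vol
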
@@ -138,13 +140,16 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None name : str or np.ndarray brain structure name or (N, ) array with the brain structure names voxel : np.ndarray - (N, 3) array with the voxel indices (can be omitted for CIFTI files only covering the surface) + (N, 3) array with the voxel indices (can be omitted for CIFTI files only + covering the surface) vertex : np.ndarray (N, ) array with the vertex indices (can be omitted for volumetric CIFTI files) affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) + (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only + covering the surface) volume_shape : Tuple[int, int, int] - shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) + shape of the volume in which the voxels were defined (not needed for CIFTI files only + covering the surface) nvertices : dict[String -> int] maps names of surface elements to integers (not needed for volumetric CIFTI files) """ @@ -157,7 +162,10 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None nelements = len(voxel) self.voxel = np.asarray(voxel, dtype=int) - self.vertex = -np.ones(nelements, dtype=int) if vertex is None else np.asarray(vertex, dtype=int) + if vertex is None: + self.vertex = -np.ones(nelements, dtype=int) + else: + self.vertex = np.asarray(vertex, dtype=int) if isinstance(name, string_types): name = [self.to_cifti_brain_structure_name(name)] * self.vertex.size @@ -280,7 +288,10 @@ def from_mapping(cls, mim): else: if shape != mim.volume.volume_dimensions: raise ValueError("All volume masks should be defined in the same volume") - if (affine != mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix).any(): + if ( + affine != + mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix + ).any(): raise ValueError("All volume masks should have the same affine") return cls(name, voxel, vertex, affine, shape, nvertices) @@ -309,11 +320,13 @@ def to_mapping(self, dim): vertices = None nvertex = None if mim.volume is None: - affine = cifti2.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(-3, matrix=self.affine) + affine = cifti2.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(-3, self.affine) mim.volume = cifti2.Cifti2Volume(self.volume_shape, affine) - cifti_bm = cifti2.Cifti2BrainModel(to_slice.start, len(bm), - 'CIFTI_MODEL_TYPE_SURFACE' if is_surface else 'CIFTI_MODEL_TYPE_VOXELS', - name, nvertex, voxels, vertices) + cifti_bm = cifti2.Cifti2BrainModel( + to_slice.start, len(bm), + 'CIFTI_MODEL_TYPE_SURFACE' if is_surface else 'CIFTI_MODEL_TYPE_VOXELS', + name, nvertex, voxels, vertices + ) mim.append(cifti_bm) return mim @@ -401,8 +414,8 @@ def to_cifti_brain_structure_name(name): else: proposed_name = 'CIFTI_STRUCTURE_%s_%s' % (structure.upper(), orientation.upper()) if proposed_name not in cifti2.CIFTI_BRAIN_STRUCTURES: - raise ValueError('%s was interpreted as %s, which is not a valid CIFTI brain structure' % - (name, proposed_name)) + raise ValueError('%s was interpreted as %s, which is not a valid CIFTI brain structure' + % (name, proposed_name)) return proposed_name @property @@ -460,7 +473,7 @@ def name(self, ): def name(self, values): self._name = np.array([self.to_cifti_brain_structure_name(name) for name in values]) - def __len__(self ): + def __len__(self): return self.name.size def __eq__(self, other): @@ -496,14 +509,17 @@ def __add__(self, 
other): affine, shape = other.affine, other.volume_shape else: affine, shape = self.affine, self.volume_shape - if other.affine is not None and ((other.affine != affine).all() or - other.volume_shape != shape): - raise ValueError("Trying to concatenate two BrainModels defined in a different brain volume") + if other.affine is not None and ( + (other.affine != affine).all() or + other.volume_shape != shape + ): + raise ValueError("Trying to concatenate two BrainModels defined " + + "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two BrainModels with inconsistent number of vertices for %s" - % name) + raise ValueError("Trying to concatenate two BrainModels with inconsistent " + + "number of vertices for %s" % name) nvertices[name] = value return type(self)( np.append(self.name, other.name), @@ -570,7 +586,7 @@ class Parcels(Axis): def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ - Creates a new BrainModel axis defining the vertices and voxels represented by each row/column + Creates a Parcels axis defining the vertices and voxels represented by each row/column Parameters ---------- @@ -581,9 +597,11 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert vertices : np.ndarray (N, ) object array each containing a sequence of vertices affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) + (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only + covering the surface) volume_shape : Tuple[int, int, int] - shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) + shape of the volume in which the voxels were defined (not needed for CIFTI files only + covering the surface) nvertices : dict[String -> int] maps names of surface elements to integers (not needed for volumetric CIFTI files) """ @@ -632,15 +650,16 @@ def from_brain_models(cls, named_brain_models): volume_shape = bm.volume_shape else: if (affine != bm.affine).any() or (volume_shape != bm.volume_shape): - raise ValueError( - "Can not combine brain models defined in different volumes into a single Parcel axis") + raise ValueError("Can not combine brain models defined in different " + + "volumes into a single Parcel axis") all_voxels.append(voxels) vertices = {} for name, _, bm_part in bm.iter_structures(): if name in bm.nvertices.keys(): if name in nvertices.keys() and nvertices[name] != bm.nvertices[name]: - raise ValueError("Got multiple conflicting number of vertices for surface structure %s" % name) + raise ValueError("Got multiple conflicting number of " + + "vertices for surface structure %s" % name) nvertices[name] = bm.nvertices[name] vertices[name] = bm_part.vertex all_vertices.append(vertices) @@ -665,7 +684,9 @@ def from_mapping(cls, mim): all_vertices = np.zeros(nparcels, dtype='object') volume_shape = None if mim.volume is None else mim.volume.volume_dimensions - affine = None if mim.volume is None else mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix + affine = None + if mim.volume is not None: + affine = mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix nvertices = {} for surface in mim.surfaces: nvertices[surface.brain_structure] = surface.surface_number_of_vertices @@ -679,7 +700,8 @@ def from_mapping(cls, mim): name 
= vertex.brain_structure vertices[vertex.brain_structure] = np.array(vertex) if name not in nvertices.keys(): - raise ValueError("Number of vertices for surface structure %s not defined" % name) + raise ValueError("Number of vertices for surface structure %s not defined" % + name) all_voxels[idx_parcel] = voxels all_vertices[idx_parcel] = vertices all_names.append(parcel.name) @@ -749,9 +771,11 @@ def __eq__(self, other): any((vox1 != vox2).any() for vox1, vox2 in zip(self.voxels, other.voxels))): return False if self.affine is not None: - if ( other.affine is None or + if ( + other.affine is None or abs(self.affine - other.affine).max() > 1e-8 or - self.volume_shape != other.volume_shape): + self.volume_shape != other.volume_shape + ): return False elif other.affine is not None: return False @@ -783,11 +807,13 @@ def __add__(self, other): affine, shape = self.affine, self.volume_shape if other.affine is not None and ((other.affine != affine).all() or other.volume_shape != shape): - raise ValueError("Trying to concatenate two Parcels defined in a different brain volume") + raise ValueError("Trying to concatenate two Parcels defined " + + "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two Parcels with inconsistent number of vertices for %s" + raise ValueError("Trying to concatenate two Parcels with inconsistent " + + "number of vertices for %s" % name) nvertices[name] = value return type(self)( @@ -838,7 +864,8 @@ def get_element(self, index): class Scalar(Axis): """ - Along this axis of the CIFTI vector/matrix each row/column has been given a unique name and optionally metadata + Along this axis of the CIFTI vector/matrix each row/column has been given + a unique name and optionally metadata """ def __init__(self, name, meta=None): @@ -850,7 +877,8 @@ def __init__(self, name, meta=None): name : np.ndarray (N, ) string array with the parcel names meta : np.ndarray - (N, ) object array with a dictionary of metadata for each row/column. Defaults to empty dictionary + (N, ) object array with a dictionary of metadata for each row/column. 
+ Defaults to empty dictionary """ self.name = np.asarray(name, dtype='U') if meta is None: @@ -977,8 +1005,9 @@ def __init__(self, name, label, meta=None): name : np.ndarray (N, ) string array with the parcel names label : np.ndarray - single dictionary or (N, ) object array with dictionaries mapping from integers to (name, (R, G, B, A)), - where name is a string and R, G, B, and A are floats between 0 and 1 giving the colour and alpha + single dictionary or (N, ) object array with dictionaries mapping + from integers to (name, (R, G, B, A)), where name is a string and R, G, B, and A are + floats between 0 and 1 giving the colour and alpha (i.e., transparency) meta : np.ndarray (N, ) object array with a dictionary of metadata for each row/column """ @@ -1056,7 +1085,11 @@ def __eq__(self, other): """ if not isinstance(other, Label) or self.size != other.size: return False - return (self.name == other.name).all() and (self.meta == other.meta).all() and (self.label == other.label).all() + return ( + (self.name == other.name).all() and + (self.meta == other.meta).all() and + (self.label == other.label).all() + ) def __add__(self, other): """ @@ -1191,7 +1224,8 @@ def unit(self, ): @unit.setter def unit(self, value): if value.upper() not in ("SECOND", "HERTZ", "METER", "RADIAN"): - raise ValueError("Series unit should be one of ('second', 'hertz', 'meter', or 'radian'") + raise ValueError("Series unit should be one of " + + "('second', 'hertz', 'meter', or 'radian'") self._unit = value.upper() def extend(self, other_axis): @@ -1270,10 +1304,12 @@ def __getitem__(self, item): nelements = (idx_end - idx_start) // step if nelements < 0: nelements = 0 - return Series(idx_start * self.step + self.start, self.step * step, nelements, self.unit) + return Series(idx_start * self.step + self.start, self.step * step, + nelements, self.unit) elif isinstance(item, int): return self.get_element(item) - raise IndexError('Series can only be indexed with integers or slices without breaking the regular structure') + raise IndexError('Series can only be indexed with integers or slices ' + + 'without breaking the regular structure') def get_element(self, index): """ @@ -1291,5 +1327,6 @@ def get_element(self, index): if index < 0: index = self.size + index if index >= self.size: - raise IndexError("index %i is out of range for get_series with size %i" % (index, self.size)) + raise IndexError("index %i is out of range for get_series with size %i" % + (index, self.size)) return self.start + self.step * index From a0e8ff38b126ab5dd80d9240443d73d96a5a1862 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 14 Mar 2019 20:23:38 +0000 Subject: [PATCH 014/689] BF: fixed abstract class for python2 --- nibabel/cifti2/cifti2_axes.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 97946e22cf..c0d8dd86d5 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1,8 +1,8 @@ import numpy as np from nibabel.cifti2 import cifti2 -from six import string_types +from six import string_types, add_metaclass from operator import xor -from abc import ABC, abstractmethod +import abc def from_mapping(mim): @@ -53,7 +53,8 @@ def to_header(axes): return cifti2.Cifti2Header(matrix) -class Axis(ABC): +@add_metaclass(abc.ABCMeta) +class Axis(object): """ Abstract class for any object describing the rows or columns of a CIFTI vector/matrix @@ -64,11 +65,11 @@ class Axis(ABC): def size(self, ): return len(self) - 
@abstractmethod + @abc.abstractmethod def __len__(self): pass - @abstractmethod + @abc.abstractmethod def __eq__(self, other): """ Compares whether two Axes are equal @@ -84,7 +85,7 @@ def __eq__(self, other): """ pass - @abstractmethod + @abc.abstractmethod def __add__(self, other): """ Concatenates two Axes of the same type @@ -100,7 +101,7 @@ def __add__(self, other): """ pass - @abstractmethod + @abc.abstractmethod def __getitem__(self, item): """ Extracts definition of single row/column or new Axis describing a subset of the rows/columns From 420dacbfb204510bb57647d3e39cae01de005ca1 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 15 Mar 2019 09:58:00 +0000 Subject: [PATCH 015/689] BF: allow any integer type, not just int --- nibabel/cifti2/cifti2_axes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index c0d8dd86d5..9c01e2abd1 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1,6 +1,6 @@ import numpy as np from nibabel.cifti2 import cifti2 -from six import string_types, add_metaclass +from six import string_types, add_metaclass, integer_types from operator import xor import abc @@ -156,7 +156,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, """ if voxel is None: if vertex is None: - raise ValueError("Voxel and vertex indices not defined") + raise ValueError("At least one of voxel or vertex indices should be defined") nelements = len(vertex) self.voxel = -np.ones((nelements, 3), dtype=int) else: @@ -458,7 +458,7 @@ def volume_shape(self, value): value = tuple(value) if len(value) != 3: raise ValueError("Volume shape should be a tuple of length 3") - if not all(isinstance(v, int) for v in value): + if not all(isinstance(v, integer_types) for v in value): raise ValueError("All elements of the volume shape should be integers") self._volume_shape = value @@ -547,7 +547,7 @@ def __getitem__(self, item): Otherwise returns a new BrainModel """ - if isinstance(item, int): + if isinstance(item, integer_types): return self.get_element(item) if isinstance(item, string_types): raise IndexError("Can not index an Axis with a string (except for Parcels)") @@ -839,7 +839,7 @@ def __getitem__(self, item): if len(idx) > 1: raise IndexError("Multiple get_parcels with name %s found" % item) return self.voxels[idx[0]], self.vertices[idx[0]] - if isinstance(item, int): + if isinstance(item, integer_types): return self.get_element(item) return type(self)(self.name[item], self.voxels[item], self.vertices[item], self.affine, self.volume_shape, self.nvertices) @@ -969,7 +969,7 @@ def __add__(self, other): ) def __getitem__(self, item): - if isinstance(item, int): + if isinstance(item, integer_types): return self.get_element(item) return type(self)(self.name[item], self.meta[item]) @@ -1114,7 +1114,7 @@ def __add__(self, other): ) def __getitem__(self, item): - if isinstance(item, int): + if isinstance(item, integer_types): return self.get_element(item) return type(self)(self.name[item], self.label[item], self.meta[item]) @@ -1307,7 +1307,7 @@ def __getitem__(self, item): nelements = 0 return Series(idx_start * self.step + self.start, self.step * step, nelements, self.unit) - elif isinstance(item, int): + elif isinstance(item, integer_types): return self.get_element(item) raise IndexError('Series can only be indexed with integers or slices ' + 'without breaking the regular structure') From 29448f134698a49fd2a61b7ee6400799c35a2eba Mon Sep 17 00:00:00 
2001 From: Michiel Cottaar Date: Fri, 15 Mar 2019 10:59:56 +0000 Subject: [PATCH 016/689] DOC: fixed many issues with the documentation Visually checked that the compiled documentation looks okay (not great) --- nibabel/cifti2/cifti2_axes.py | 89 +++++++++++++++++------------------ 1 file changed, 43 insertions(+), 46 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 9c01e2abd1..2d0949bc5f 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -113,33 +113,25 @@ class BrainModel(Axis): Each row/column in the CIFTI vector/matrix represents a single vertex or voxel This Axis describes which vertex/voxel is represented by each row/column. - - Attributes - ---------- - name : np.ndarray - (N, ) array with the brain structure objects - voxel : np.ndarray - (N, 3) array with the voxel indices - vertex : np.ndarray - (N, ) array with the vertex indices """ def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None, nvertices=None): """ - Creates a BrainModel axis defining the vertices and voxels represented by each row/column + New BrainModel axes can be constructed by passing on the greyordinate brain-structure + names and voxel/vertex indices to the constructor or by one of the + factory methods: - A more convenient way to create BrainModel axes is provided by the factory methods: - - `from_mask`: creates surface or volumetric BrainModel axis from respectively + - :py:meth:`~BrainModel.from_mask`: creates surface or volumetric BrainModel axis from respectively 1D or 3D masks - - `from_surface`: creates a volumetric BrainModel axis + - :py:meth:`~BrainModel.from_surface`: creates a volumetric BrainModel axis The resulting BrainModel axes can be concatenated by adding them together. Parameters ---------- name : str or np.ndarray - brain structure name or (N, ) array with the brain structure names + brain structure name or (N, ) string array with the brain structure names voxel : np.ndarray (N, 3) array with the voxel indices (can be omitted for CIFTI files only covering the surface) @@ -337,7 +329,7 @@ def iter_structures(self, ): Yields ------ - tuple with + tuple with 3 elements: - CIFTI brain structure name - slice to select the data associated with the brain structure from the tensor - brain model covering that specific brain structure @@ -357,14 +349,16 @@ def to_cifti_brain_structure_name(name): Attempts to convert the name of an anatomical region in a format recognized by CIFTI This function returns: - * the name if it is in the CIFTI format already - * if the name is a tuple the first element is assumed to be the structure name while - the second is assumed to be the hemisphere (left, right or both). The latter will default - to both. - * names like left_cortex, cortex_left, LeftCortex, or CortexLeft will be converted to - CIFTI_STRUCTURE_CORTEX_LEFT - see ``nibabel.cifti2.tests.test_name`` for examples of which conversions are possible + - the name if it is in the CIFTI format already + - if the name is a tuple the first element is assumed to be the structure name while + the second is assumed to be the hemisphere (left, right or both). The latter will default + to both. 
+ - names like left_cortex, cortex_left, LeftCortex, or CortexLeft will be converted to + CIFTI_STRUCTURE_CORTEX_LEFT + + see :py:func:`nibabel.cifti2.tests.test_name` for examples of + which conversions are possible Parameters ---------- @@ -587,8 +581,6 @@ class Parcels(Axis): def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ - Creates a Parcels axis defining the vertices and voxels represented by each row/column - Parameters ---------- name : np.ndarray @@ -618,7 +610,7 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert for check_name in ('name', 'voxels', 'vertices'): if getattr(self, check_name).shape != (self.size, ): - raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + raise ValueError("Input {} has incorrect shape ({}) for Parcel axis".format( check_name, getattr(self, check_name).shape)) @classmethod @@ -710,7 +702,7 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the get_parcels to a MatrixIndicesMap for storage in CIFTI format + Converts the Parsel to a MatrixIndicesMap for storage in CIFTI format Parameters ---------- @@ -739,6 +731,9 @@ def to_mapping(self, dim): @property def affine(self, ): + """ + Affine of the volumetric image in which the greyordinate voxels were defined + """ return self._affine @affine.setter @@ -753,6 +748,9 @@ def affine(self, value): @property def volume_shape(self, ): + """ + Shape of the volumetric image in which the greyordinate voxels were defined + """ return self._volume_shape @volume_shape.setter @@ -828,6 +826,7 @@ def __add__(self, other): def __getitem__(self, item): """ Extracts subset of the axes based on the type of ``item``: + - `int`: 3-element tuple of (parcel name, parcel voxels, parcel vertices) - `string`: 2-element tuple of (parcel voxels, parcel vertices - other object that can index 1D arrays: new Parcel axis @@ -837,7 +836,7 @@ def __getitem__(self, item): if len(idx) == 0: raise IndexError("Parcel %s not found" % item) if len(idx) > 1: - raise IndexError("Multiple get_parcels with name %s found" % item) + raise IndexError("Multiple parcels with name %s found" % item) return self.voxels[idx[0]], self.vertices[idx[0]] if isinstance(item, integer_types): return self.get_element(item) @@ -871,8 +870,6 @@ class Scalar(Axis): def __init__(self, name, meta=None): """ - Creates a new Scalar axis from (name, meta-data) pairs - Parameters ---------- name : np.ndarray @@ -894,7 +891,7 @@ def __init__(self, name, meta=None): @classmethod def from_mapping(cls, mim): """ - Creates a new get_scalar axis based on a CIFTI dataset + Creates a new Scalar axis based on a CIFTI dataset Parameters ---------- @@ -985,7 +982,7 @@ def get_element(self, index): Returns ------- tuple with 2 elements - - unicode name of the get_scalar + - unicode name of the row/column - dictionary with the element metadata """ return self.name[index], self.meta[index] @@ -993,14 +990,14 @@ def get_element(self, index): class Label(Axis): """ + Defines CIFTI axis for label array. 
+ Along this axis of the CIFTI vector/matrix each row/column has been given a unique name, - get_label table, and optionally metadata + label table, and optionally metadata """ def __init__(self, name, label, meta=None): """ - Creates a new Label axis from (name, meta-data) pairs - Parameters ---------- name : np.ndarray @@ -1028,7 +1025,7 @@ def __init__(self, name, label, meta=None): @classmethod def from_mapping(cls, mim): """ - Creates a new get_scalar axis based on a CIFTI dataset + Creates a new Label axis based on a CIFTI dataset Parameters ---------- @@ -1036,7 +1033,7 @@ def from_mapping(cls, mim): Returns ------- - Scalar + Label """ tables = [{key: (value.label, value.rgba) for key, value in nm.label_table.items()} for nm in mim.named_maps] @@ -1099,7 +1096,7 @@ def __add__(self, other): Parameters ---------- other : Label - scalar axis to be appended to the current one + label axis to be appended to the current one Returns ------- @@ -1130,8 +1127,8 @@ def get_element(self, index): Returns ------- tuple with 2 elements - - unicode name of the get_scalar - - dictionary with the get_label table + - unicode name of the row/column + - dictionary with the label table - dictionary with the element metadata """ return self.name[index], self.label[index], self.meta[index] @@ -1197,7 +1194,7 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the get_series to a MatrixIndicesMap for storage in CIFTI format + Converts the Series to a MatrixIndicesMap for storage in CIFTI format Parameters ---------- @@ -1231,7 +1228,7 @@ def unit(self, value): def extend(self, other_axis): """ - Concatenates two get_series + Concatenates two Series Note: this will ignore the start point of the other axis @@ -1272,18 +1269,18 @@ def __add__(self, other): Parameters ---------- other : Series - Time get_series to append at the end of the current time get_series. - Note that the starting time of the other time get_series is ignored. + Time Series to append at the end of the current time Series. + Note that the starting time of the other time Series is ignored. 
Returns ------- Series - New time get_series with the concatenation of the two + New time Series with the concatenation of the two Raises ------ ValueError - raised if the repetition time of the two time get_series is different + raised if the repetition time of the two time Series is different """ if isinstance(other, Series): return self.extend(other) @@ -1328,6 +1325,6 @@ def get_element(self, index): if index < 0: index = self.size + index if index >= self.size: - raise IndexError("index %i is out of range for get_series with size %i" % + raise IndexError("index %i is out of range for Series with size %i" % (index, self.size)) return self.start + self.step * index From 56842680b3d171ac3f1f674f6e4d373517c1746c Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 15 Mar 2019 11:30:50 +0000 Subject: [PATCH 017/689] RF: made flake8 happy again --- nibabel/cifti2/cifti2_axes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 2d0949bc5f..65fd10a744 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -122,9 +122,9 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, names and voxel/vertex indices to the constructor or by one of the factory methods: - - :py:meth:`~BrainModel.from_mask`: creates surface or volumetric BrainModel axis from respectively - 1D or 3D masks - - :py:meth:`~BrainModel.from_surface`: creates a volumetric BrainModel axis + - :py:meth:`~BrainModel.from_mask`: creates surface or volumetric BrainModel axis + from respectively 1D or 3D masks + - :py:meth:`~BrainModel.from_surface`: creates a surface BrainModel axis The resulting BrainModel axes can be concatenated by adding them together. From 610ad5dd7fd5af92cbef6f558ebd79ce883b7c05 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Mar 2019 12:05:23 -0400 Subject: [PATCH 018/689] DOC: Changelog skeleton --- Changelog | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/Changelog b/Changelog index d32936cfd0..4075c043c5 100644 --- a/Changelog +++ b/Changelog @@ -24,6 +24,24 @@ Gerhard (SG), Eric Larson (EL), Yaroslav Halchenko (YOH) and Chris Cheng (CC). References like "pr/298" refer to github pull request numbers. +2.4.0 (Monday 25 March 2019) +============================ + +New features +------------ + +Enhancements +------------ + +Bug fixes +--------- + +Maintenance +----------- + +API changes and deprecations +---------------------------- + 2.3.3 (Wednesday 16 January 2019) ================================= From 61345daef6f101c9f9bdd977b6394d8271692c01 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Mar 2019 12:11:19 -0400 Subject: [PATCH 019/689] MNT/DOC: Update mailmap, zenodo, author list --- .mailmap | 4 ++-- .zenodo.json | 14 ++++++++------ doc/source/index.rst | 1 + 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.mailmap b/.mailmap index d917d4d279..00f7e70e83 100644 --- a/.mailmap +++ b/.mailmap @@ -43,13 +43,13 @@ Marc-Alexandre Côté Marc-Alexandre Cote mathiasg Michael Hanke Michael Hanke -Nguyen, Ly lxn2 +Ly Nguyen lxn2 Oliver P. Hinds ohinds Paul McCarthy Paul McCarthy Satrajit Ghosh Satrajit Ghosh Serge Koudoro skoudoro Stephan Gerhard Stephan Gerhard Thomas Roos Roosted7 -Venky Reddy R3DDY97 +Venkateswara Reddy Reddam R3DDY97 Yaroslav O. Halchenko Yaroslav O. 
Halchenko Yaroslav Halchenko diff --git a/.zenodo.json b/.zenodo.json index 79f1e2081b..525f64c8df 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -178,6 +178,11 @@ "name": "Kaczmarzyk, Jakub", "orcid": "0000-0002-5544-7577" }, + { + "affiliation": "Universit\u00e9 de Sherbrooke", + "name": "Legarreta, Jon Haitz", + "orcid": "0000-0002-9661-1396" + }, { "name": "Hahn, Kevin S." }, @@ -192,11 +197,6 @@ "name": "Poline, Jean-Baptiste", "orcid": "0000-0002-9794-749X" }, - { - "affiliation": "Universit\u00e9 de Sherbrooke", - "name": "Legarreta, Jon Haitz", - "orcid": "0000-0002-9661-1396" - }, { "affiliation": "University College London, London, UK", "name": "Stutters, Jon", @@ -265,7 +265,9 @@ "name": "Roos, Thomas" }, { - "name": "Reddy, Venky" + "affiliation": "National Institute of Mental Health and Neuro-Sciences, India", + "name": "Reddam, Venkateswara Reddy", + "orcid": "0000-0001-6817-2966" }, { "name": "freec84" diff --git a/doc/source/index.rst b/doc/source/index.rst index b768a57d71..210161b8bf 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -92,6 +92,7 @@ contributed code and discussion (in rough order of appearance): * Igor Solovey * Jon Haitz Legarreta Gorroño * Katrin Leinweber +* Soichi Hayashi License reprise =============== From 570cdc1518a347ebe5f202392e76ce7e73c147f4 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Mar 2019 12:13:19 -0400 Subject: [PATCH 020/689] MAINT: Version 2.4.0 --- nibabel/info.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index abe71735cd..1726d0e53d 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -19,8 +19,8 @@ _version_major = 2 _version_minor = 4 _version_micro = 0 -_version_extra = 'dev' -# _version_extra = '' +# _version_extra = 'dev' +_version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From c3edd0a9c56655d2d1d59f052ef0d166ff47f7ae Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 17 Mar 2019 16:39:06 -0400 Subject: [PATCH 021/689] TEST: Check world direction of encoding remains consistent --- nibabel/tests/test_nifti1.py | 32 ++++++++++++++++++++++++++++++ nibabel/tests/test_orientations.py | 22 ++++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 78f876ec7d..8ae9d35116 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -28,8 +28,10 @@ from nibabel.spatialimages import HeaderDataError from nibabel.tmpdirs import InTemporaryDirectory from ..freesurfer import load as mghload +from ..orientations import aff2axcodes from .test_arraywriters import rt_err_estimate, IUINT_TYPES +from .test_orientations import ALL_ORNTS from .test_helpers import bytesio_filemap, bytesio_round_trip from .nibabel_data import get_nibabel_data, needs_nibabel_data @@ -1403,6 +1405,36 @@ def test_rt_bias(self): bias_thresh = np.max([max_miss / np.sqrt(count), eps]) assert_true(np.abs(bias) < bias_thresh) + def test_reoriented_dim_info(self): + # Check that dim_info is reoriented correctly + arr = np.arange(24).reshape((2, 3, 4)) + # Start as RAS + aff = np.diag([2, 3, 4, 1]) + simg = self.single_class(arr, aff) + for freq, phas, slic in ((0, 1, 2), + (0, 2, 1), + (1, 0, 2), + (2, 0, 1), + (None, None, None), + (0, 2, None), + (0, None, None), + (None, 2, 1), + (None, None, 1), + ): + simg.header.set_dim_info(freq, phas, slic) + fdir = 'RAS'[freq] if freq is not None else None + pdir = 'RAS'[phas] if phas is not None else None + sdir = 'RAS'[slic] if slic is not None else None + for ornt in ALL_ORNTS: + rimg = simg.as_reoriented(np.array(ornt)) + axcode = aff2axcodes(rimg.affine) + dirs = ''.join(axcode).replace('P', 'A').replace('I', 'S').replace('L', 'R') + new_freq, new_phas, new_slic = rimg.header.get_dim_info() + new_fdir = dirs[new_freq] if new_freq is not None else None + new_pdir = dirs[new_phas] if new_phas is not None else None + new_sdir = dirs[new_slic] if new_slic is not None else None + assert_equal((new_fdir, new_pdir, new_sdir), (fdir, pdir, sdir)) + @runif_extra_has('slow') def test_large_nifti1(): diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 58c5e5f9e2..0605d33f20 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -83,6 +83,18 @@ OUT_ORNTS = [np.array(ornt) for ornt in OUT_ORNTS] +_LABELS = ['RL', 'AP', 'SI'] +ALL_AXCODES = [(_LABELS[i0][j0], _LABELS[i1][j1], _LABELS[i2][j2]) + for i0 in range(3) for i1 in range(3) for i2 in range(3) + if i0 != i1 != i2 != i0 + for j0 in range(2) for j1 in range(2) for j2 in range(2)] + +ALL_ORNTS = [[[i0, j0], [i1, j1], [i2, j2]] + for i0 in range(3) for i1 in range(3) for i2 in range(3) + if i0 != i1 != i2 != i0 + for j0 in [1, -1] for j1 in [1, -1] for j2 in [1, -1]] + + def same_transform(taff, ornt, shape): # Applying transformations implied by `ornt` to a made-up array # ``arr`` of shape `shape`, results in ``t_arr``. 
When the point @@ -125,6 +137,10 @@ def test_apply(): apply_orientation, a, [[0, 1], [np.nan, np.nan], [2, 1]]) + shape = np.array(a.shape) + for ornt in ALL_ORNTS: + t_arr = apply_orientation(a, ornt) + assert_array_equal(a.shape, np.array(t_arr.shape)[np.array(ornt)[:, 0]]) def test_flip_axis(): @@ -282,6 +298,9 @@ def test_ornt2axcodes(): # As do directions not in range assert_raises(ValueError, ornt2axcodes, [[0, 0]]) + for axcodes, ornt in zip(ALL_AXCODES, ALL_ORNTS): + assert_equal(ornt2axcodes(ornt), axcodes) + def test_axcodes2ornt(): # Go from axcodes back to orientations @@ -340,6 +359,9 @@ def test_axcodes2ornt(): assert_raises(ValueError, axcodes2ornt, 'blD', ('SD', 'BF', 'lD')) assert_raises(ValueError, axcodes2ornt, 'blD', ('SD', 'SF', 'lD')) + for axcodes, ornt in zip(ALL_AXCODES, ALL_ORNTS): + assert_array_equal(axcodes2ornt(axcodes), ornt) + def test_aff2axcodes(): assert_equal(aff2axcodes(np.eye(4)), tuple('RAS')) From 78772c40f7e2b0cac0943909dc0349f0b45d618f Mon Sep 17 00:00:00 2001 From: constracti Date: Sat, 19 Jan 2019 02:46:07 +0200 Subject: [PATCH 022/689] BF: fix dim_info transformation after orientation --- nibabel/nifti1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 548ad34658..021e82ce49 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -2020,7 +2020,7 @@ def as_reoriented(self, ornt): # otherwise check where we have mapped it to if value is None: continue - new_dim[idx] = np.where(ornt[:, 0] == idx)[0] + new_dim[idx] = ornt[value, 0] img.header.set_dim_info(*new_dim) From 8d0f0303b99a6ceaaf2c6edd9a2c44fc439765d9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 18 Mar 2019 11:06:05 -0400 Subject: [PATCH 023/689] RF: Use list comprehension in reordering --- nibabel/nifti1.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 021e82ce49..b7e81405a5 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -2014,13 +2014,9 @@ def as_reoriented(self, ornt): return img # Also apply the transform to the dim_info fields - new_dim = list(img.header.get_dim_info()) - for idx, value in enumerate(new_dim): - # For each value, leave as None if it was that way, - # otherwise check where we have mapped it to - if value is None: - continue - new_dim[idx] = ornt[value, 0] + new_dim = [ + None if orig_dim is None else int(ornt[orig_dim, 0]) + for orig_dim in img.header.get_dim_info()] img.header.set_dim_info(*new_dim) From fd7a311a89486c5f0c685faf9f6ec0be6bceb2a1 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 1 Mar 2019 12:27:40 -0500 Subject: [PATCH 024/689] ENH: Add NIFTI_XFORM_TEMPLATE_OTHER xform code --- nibabel/nifti1.py | 4 +++- nibabel/tests/test_nifti1.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 548ad34658..5393b5a543 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -125,7 +125,9 @@ (1, 'scanner', "NIFTI_XFORM_SCANNER_ANAT"), (2, 'aligned', "NIFTI_XFORM_ALIGNED_ANAT"), (3, 'talairach', "NIFTI_XFORM_TALAIRACH"), - (4, 'mni', "NIFTI_XFORM_MNI_152")), fields=('code', 'label', 'niistring')) + (4, 'mni', "NIFTI_XFORM_MNI_152"), + (5, 'template', "NIFTI_XFORM_TEMPLATE_OTHER"), + ), fields=('code', 'label', 'niistring')) # unit codes unit_codes = Recoder(( # code, label diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 78f876ec7d..3287634f82 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -226,6 +226,21 @@ def test_nifti_qsform_checks(self): assert_equal(message, 'sform_code -1 not valid; setting to 0') + def test_nifti_xform_codes(self): + # Verify that all xform codes can be set in both qform and sform + hdr = self.header_class() + affine = np.eye(4) + for code in nifti1.xform_codes.keys(): + hdr.set_qform(affine, code) + assert_equal(hdr['qform_code'], nifti1.xform_codes[code]) + hdr.set_sform(affine, code) + assert_equal(hdr['sform_code'], nifti1.xform_codes[code]) + + # Raise KeyError on unknown code + for bad_code in (-1, 6, 10): + assert_raises(KeyError, hdr.set_qform, affine, bad_code) + assert_raises(KeyError, hdr.set_sform, affine, bad_code) + def test_magic_offset_checks(self): # magic and offset HC = self.header_class From d9e59e85ff15331ea1cbb05c40e6de827f0dab23 Mon Sep 17 00:00:00 2001 From: Matthew Brett Date: Tue, 19 Mar 2019 12:33:50 +0000 Subject: [PATCH 025/689] DOC: record handover to Chris of maintainer role Update various docs to show Chris is maintainer now. Reorder authors according to current contributions. --- COPYING | 5 +++-- Changelog | 34 +++++++++++++++++----------------- doc/source/index.rst | 6 +++--- nibabel/info.py | 2 +- 4 files changed, 24 insertions(+), 23 deletions(-) diff --git a/COPYING b/COPYING index 6f03ba5ccd..8511235733 100644 --- a/COPYING +++ b/COPYING @@ -18,12 +18,13 @@ documentation is covered by the MIT license. The MIT License - Copyright (c) 2009-2014 Matthew Brett + Copyright (c) 2009-2019 Matthew Brett Copyright (c) 2010-2013 Stephan Gerhard Copyright (c) 2006-2014 Michael Hanke Copyright (c) 2011 Christian Haselgrove Copyright (c) 2010-2011 Jarrod Millman - Copyright (c) 2011-2014 Yaroslav Halchenko + Copyright (c) 2011-2019 Yaroslav Halchenko + Copyright (c) 2015-2019 Chris Markiewicz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Changelog b/Changelog index d32936cfd0..dc7053be8f 100644 --- a/Changelog +++ b/Changelog @@ -18,9 +18,10 @@ The full VCS changelog is available here: Nibabel releases **************** -Most work on NiBabel so far has been by Matthew Brett (MB), Michael Hanke (MH) -Ben Cipollini (BC), Marc-Alexandre Côté (MC), Chris Markiewicz (CM), Stephan -Gerhard (SG), Eric Larson (EL), Yaroslav Halchenko (YOH) and Chris Cheng (CC). 
+Most work on NiBabel so far has been by Matthew Brett (MB), Chris Markiewicz +(CM), Michael Hanke (MH), Marc-Alexandre Côté (MC), Ben Cipollini (BC), Paul +McCarthy (PM), Chris Cheng (CC), Yaroslav Halchenko (YOH), Satra Ghosh (SG), +Eric Larson (EL), Demien Wasserman, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. @@ -116,16 +117,16 @@ Enhancements * Simplfiy MGHImage and add footer fields (pr/569) (CM, reviewed by MB) * Force sform/qform codes to be ints, rather than numpy types (pr/575) (Paul McCarthy, reviewed by MB, CM) -* Auto-fill color table in FreeSurfer annotation file (pr/592) (Paul McCarthy, +* Auto-fill color table in FreeSurfer annotation file (pr/592) (PM, reviewed by CM, MB) * Set default intent code for CIFTI2 images (pr/604) (Mathias Goncalves, - reviewed by CM, Satra Ghosh, MB, Tim Coalson) + reviewed by CM, SG, MB, Tim Coalson) * Raise informative error on empty files (pr/611) (Pradeep Raamana, reviewed by CM, MB) * Accept degenerate filenames such as ``.nii`` (pr/621) (Dimitri Papadopoulos-Orfanos, reviewed by Yaroslav Halchenko) * Take advantage of ``IndexedGzipFile`` ``drop_handles`` flag to release - filehandles by default (pr/614) (Paul McCarthy, reviewed by CM, MB) + filehandles by default (pr/614) (PM, reviewed by CM, MB) Bug fixes --------- @@ -135,7 +136,7 @@ Bug fixes CM, MB) * Accept lower-case orientation codes in TRK files (pr/600) (Kesshi Jordan, MB, reviewed by MB, MC, CM) -* Annotation file reading (pr/592) (Paul McCarthy, reviewed by CM, MB) +* Annotation file reading (pr/592) (PM, reviewed by CM, MB) * Fix buffer size calculation in ArraySequence (pr/597) (Serge Koudoro, reviewed by MC, MB, Eleftherios Garyfallidis, CM) * Resolve ``UnboundLocalError`` in Python 3 (pr/607) (Jakub Kaczmarzyk, @@ -175,14 +176,14 @@ Bug fixes * Set L/R labels in orthoview correctly (pr/564) (CM) * Defer use of ufunc / memmap test - allows "freezing" (pr/572) (MB, reviewed - by Satra Ghosh) + by SG) * Fix doctest failures with pre-release numpy (pr/582) (MB, reviewed by CM) Maintenance ----------- -* Update documentation around NIfTI qform/sform codes (pr/576) (Paul McCarthy, - reviewed by MB, CM) + (pr/580) (Bennet Fauber, reviewed by Paul McCarthy) +* Update documentation around NIfTI qform/sform codes (pr/576) (PM, + reviewed by MB, CM) + (pr/580) (Bennet Fauber, reviewed by PM) * Skip precision test on macOS, newer numpy (pr/583) (MB, reviewed by CM) * Simplify AppVeyor script, removing conda (pr/584) (MB, reviewed by CM) @@ -192,12 +193,11 @@ Maintenance New features ------------ -* CIFTI support (pr/249) (Satra Ghosh, Michiel Cottaar, BC, CM, Demian - Wassermann, MB) +* CIFTI support (pr/249) (SG, Michiel Cottaar, BC, CM, Demian Wasserman, MB) * Support for MRtrix TCK streamlines file format (pr/486) (MC, reviewed by MB, Arnaud Bore, J-Donald Tournier, Jean-Christophe Houde) * Added ``get_fdata()`` as default method to retrieve scaled floating point - data from ``DataobjImage``s (pr/551) (MB, reviewed by CM, Satra Ghosh) + data from ``DataobjImage``s (pr/551) (MB, reviewed by CM, SG) Enhancements ------------ @@ -211,19 +211,19 @@ Enhancements * Allow dtype specifiers as fileslice input (pr/485) (MB) * Support "headerless" ArrayProxy specification, enabling memory-efficient ArrayProxy reshaping (pr/521) (CM) -* Allow unknown NIfTI intent codes, add FSL codes (pr/528) (Paul McCarthy) +* Allow unknown NIfTI intent codes, add FSL codes (pr/528) (PM) * Improve error handling for ``img.__getitem__`` (pr/533) (Ariel Rokem) * Delegate 
reorientation to SpatialImage classes (pr/544) (Mark Hymers, CM, reviewed by MB) * Enable using ``indexed_gzip`` to reduce memory usage when reading from - gzipped NIfTI and MGH files (pr/552) (Paul McCarthy, reviewed by MB, CM) + gzipped NIfTI and MGH files (pr/552) (PM, reviewed by MB, CM) Bug fixes --------- * Miscellaneous MINC reader fixes (pr/493) (Robert D. Vincent, reviewed by CM, MB) -* Fix corner case in ``wrapstruct.get`` (pr/516) (Paul McCarthy, reviewed by +* Fix corner case in ``wrapstruct.get`` (pr/516) (PM, reviewed by CM, MB) Maintenance @@ -524,7 +524,7 @@ Special thanks to Chris Burns, Jarrod Millman and Yaroslav Halchenko. * New feature release * Python 3.2 support -* Substantially enhanced gifti reading support (SG) +* Substantially enhanced gifti reading support (Stephan Gerhard) * Refactoring of trackvis read / write to allow reading and writing of voxel points and mm points in tracks. Deprecate use of negative voxel sizes; set voxel_order field in trackvis header. Thanks to Chris Filo diff --git a/doc/source/index.rst b/doc/source/index.rst index b768a57d71..3c42096b2a 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -27,9 +27,9 @@ discussions, release procedure and more. Authors and Contributors ======================== -The main authors of NiBabel are `Matthew Brett`_, `Michael Hanke`_, `Ben -Cipollini`_, `Marc-Alexandre Côté`_, Chris Markiewicz, `Stephan Gerhard`_ and -`Eric Larson`_. The authors are grateful to the following people who have +Most work on NiBabel so far has been by `Matthew Brett`_, Chris Markiewicz, +`Michael Hanke`_, `Marc-Alexandre Côté`_, `Ben Cipollini`_, Paul McCarthy and +Chris Cheng. The authors are grateful to the following people who have contributed code and discussion (in rough order of appearance): * `Yaroslav O. Halchenko`_ diff --git a/nibabel/info.py b/nibabel/info.py index abe71735cd..56892c6efa 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -192,7 +192,7 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): # Main setup parameters NAME = 'nibabel' -MAINTAINER = "Matthew Brett, Michael Hanke, Eric Larson, Chris Markiewicz" +MAINTAINER = "Chris Markiewicz" MAINTAINER_EMAIL = "neuroimaging@python.org" DESCRIPTION = description LONG_DESCRIPTION = long_description From 6d031e3f5393ad16ca47e81040f6a15d181de80d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 19 Mar 2019 15:31:24 -0400 Subject: [PATCH 026/689] MNT: Update author list, mailmap, zenodo --- .mailmap | 1 + .zenodo.json | 7 +++++++ doc/source/index.rst | 2 ++ 3 files changed, 10 insertions(+) diff --git a/.mailmap b/.mailmap index 00f7e70e83..d0f2bd6aff 100644 --- a/.mailmap +++ b/.mailmap @@ -38,6 +38,7 @@ Jean-Baptiste Poline jbpoline Jon Haitz Legarreta Jon Haitz Legarreta Gorroño Kesshi Jordan kesshijordan Kevin S. Hahn Kevin S. 
Hahn +Konstantinos Raktivan constracti Krish Subramaniam Krish Subramaniam Marc-Alexandre Côté Marc-Alexandre Cote Mathias Goncalves mathiasg diff --git a/.zenodo.json b/.zenodo.json index 525f64c8df..10d7a54128 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -162,6 +162,9 @@ { "name": "Nguyen, Ly" }, + { + "name": "Reddigari, Samir" + }, { "name": "St-Jean, Samuel" }, @@ -247,6 +250,10 @@ "name": "Leinweber, Katrin", "orcid": "0000-0001-5135-5758" }, + { + "affiliation": "National Technical University of Athens, Greece", + "name": "Raktivan, Konstantinos" + }, { "affiliation": "Friedrich-Alexander-Universit\u00e4t Erlangen-N\u00fcrnberg, Erlangen, Germany", "name": "Fischer, Peter", diff --git a/doc/source/index.rst b/doc/source/index.rst index 550dc8a0cb..d63844f712 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -93,6 +93,8 @@ contributed code and discussion (in rough order of appearance): * Jon Haitz Legarreta Gorroño * Katrin Leinweber * Soichi Hayashi +* Samir Reddigari +* Konstantinos Raktivan License reprise =============== From 42f3c9bce8d81ba489facc461417a0ad970bf1c5 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 19 Mar 2019 16:41:18 -0400 Subject: [PATCH 027/689] DOC: Update changelog --- Changelog | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/Changelog b/Changelog index 73114f8cdf..0aae6b0212 100644 --- a/Changelog +++ b/Changelog @@ -33,12 +33,34 @@ New features Enhancements ------------ +* Accept TCK files produced by tools with other delimiter/EOF defaults + (pr/720) (Soichi Hayashi, reviewed by CM, MB, MC) +* Allow BrainModels or Parcels to contain a single vertex in CIFTI + (pr/739) (Michiel Cottaar, reviewed by CM) +* Support for ``NIFTI_XFORM_TEMPLATE_OTHER`` xform code (pr/743) (CM) Bug fixes --------- +* Skip refcheck in ArraySequence construction/extension (pr/719) (Ariel + Rokem, reviewed by CM, MC) +* Use safe resizing for ArraySequence extension (pr/724) (CM, reviewed + by MC) +* Fix typo in error message (pr/726) (Jon Haitz Legarreta Gorroño, + reviewed by CM) +* Support DICOM slice sorting in Python 3 (pr/728) (Samir Reddigari, + reviewed by CM) +* Correctly reorient dim_info when reorienting NIfTI images + (Konstantinos Raktivan, CM, reviewed by CM) Maintenance ----------- +* Import updates to reduce upstream deprecation warnings (pr/711, + pr/705, pr/738) (EL, YOH, reviewed by CM) +* Delay import of ``nibabel.testing``, ``nose`` and ``mock`` to speed up + import (pr/699) (CM) +* Increase coverage testing, drop coveralls (pr/722, pr/732) (CM) +* Add Zenodo metadata, sorted by commits (pr/732) (CM + others) +* Update author listing and copyrights (pr/742) (MB, reviewed by CM) API changes and deprecations ---------------------------- From fbd28dc20543b2c275fe508abe1c218b9970c94a Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 19 Mar 2019 21:40:23 +0000 Subject: [PATCH 028/689] Apply suggestions from code review Co-Authored-By: MichielCottaar --- nibabel/cifti2/cifti2_axes.py | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 65fd10a744..af4524eaf6 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -62,7 +62,7 @@ class Axis(object): """ @property - def size(self, ): + def size(self): return len(self) @abc.abstractmethod @@ -150,13 +150,13 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, if vertex is None: raise 
ValueError("At least one of voxel or vertex indices should be defined") nelements = len(vertex) - self.voxel = -np.ones((nelements, 3), dtype=int) + self.voxel = np.full((nelements, 3), fill_value=-1, dtype=int) else: nelements = len(voxel) self.voxel = np.asarray(voxel, dtype=int) if vertex is None: - self.vertex = -np.ones(nelements, dtype=int) + self.vertex = np.full(nelements, fill_value=-1, dtype=int) else: self.vertex = np.asarray(vertex, dtype=int) @@ -173,16 +173,17 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, if name not in self.name: del self.nvertices[name] - if self.is_surface.all(): + is_surface = self.is_surface + if is_surface.all(): self.affine = None self.volume_shape = None else: self.affine = affine self.volume_shape = volume_shape - if (self.vertex[self.is_surface] < 0).any(): + if np.any(self.vertex[is_surface] < 0): raise ValueError('Undefined vertex indices found for surface elements') - if (self.voxel[~self.is_surface] < 0).any(): + if np.any(self.voxel[~is_surface] < 0): raise ValueError('Undefined voxel indices found for volumetric elements') for check_name in ('name', 'voxel', 'vertex'): @@ -259,9 +260,9 @@ def from_mapping(cls, mim): ------- BrainModel """ - nbm = np.sum([bm.index_count for bm in mim.brain_models]) - voxel = -np.ones((nbm, 3)) - vertex = -np.ones(nbm) + nbm = sum(bm.index_count for bm in mim.brain_models) + voxel = np.full((nbm, 3), fill_value=-1, dtype=int) + vertex = np.full(nbm, fill_value=-1, dtype=int) name = [] nvertices = {} @@ -323,7 +324,7 @@ def to_mapping(self, dim): mim.append(cifti_bm) return mim - def iter_structures(self, ): + def iter_structures(self): """ Iterates over all brain structures in the order that they appear along the axis @@ -414,7 +415,7 @@ def to_cifti_brain_structure_name(name): return proposed_name @property - def is_surface(self, ): + def is_surface(self): """ (N, ) boolean array which is true for any element on the surface """ @@ -499,7 +500,8 @@ def __add__(self, other): ------- BrainModel """ - if isinstance(other, BrainModel): + if not isinstance(other, BrainModel): + return NotImplemented if self.affine is None: affine, shape = other.affine, other.volume_shape else: @@ -516,7 +518,7 @@ def __add__(self, other): raise ValueError("Trying to concatenate two BrainModels with inconsistent " + "number of vertices for %s" % name) nvertices[name] = value - return type(self)( + return self.__class__( np.append(self.name, other.name), np.concatenate((self.voxel, other.voxel), 0), np.append(self.vertex, other.vertex), @@ -545,7 +547,7 @@ def __getitem__(self, item): return self.get_element(item) if isinstance(item, string_types): raise IndexError("Can not index an Axis with a string (except for Parcels)") - return type(self)(self.name[item], self.voxel[item], self.vertex[item], + return self.__class__(self.name[item], self.voxel[item], self.vertex[item], self.affine, self.volume_shape, self.nvertices) def get_element(self, index): @@ -565,8 +567,8 @@ def get_element(self, index): - structure.BrainStructure object describing the brain structure the element was taken from """ is_surface = self.name[index] in self.nvertices.keys() - name = 'vertex' if is_surface else 'voxel' - return is_surface, getattr(self, name)[index], self.name[index] + struct = self.vertex if is_surface else self.voxel + return is_surface, struct[index], self.name[index] class Parcels(Axis): From c33e0d17dad2711ddc89b49e4e8eb3dd9d62640b Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 19 Mar 2019 22:02:15 +0000 
Subject: [PATCH 029/689] Added other reviewer suggestions Still need to to the tests of the validation code --- nibabel/cifti2/cifti2_axes.py | 70 ++++++++++++++++------------------- 1 file changed, 31 insertions(+), 39 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index af4524eaf6..a1c0de7165 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -279,14 +279,6 @@ def from_mapping(cls, mim): if affine is None: shape = mim.volume.volume_dimensions affine = mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix - else: - if shape != mim.volume.volume_dimensions: - raise ValueError("All volume masks should be defined in the same volume") - if ( - affine != - mim.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix - ).any(): - raise ValueError("All volume masks should have the same affine") return cls(name, voxel, vertex, affine, shape, nvertices) def to_mapping(self, dim): @@ -478,13 +470,13 @@ def __eq__(self, other): if xor(self.affine is None, other.affine is None): return False return ( - ((self.affine is None and other.affine is None) or - (abs(self.affine - other.affine).max() < 1e-8 and - self.volume_shape == other.volume_shape)) and - (self.nvertices == other.nvertices) and - (self.name == other.name).all() and - (self.voxel[~self.is_surface] == other.voxel[~other.is_surface]).all() and - (self.vertex[~self.is_surface] == other.vertex[~other.is_surface]).all() + (self.affine is None or + np.allclose(self.affine, other.affine) and + self.volume_shape == other.volume_shape) and + self.nvertices == other.nvertices and + np.array_equal(self.name, other.name) and + np.array_equal(self.voxel[~self.is_surface], other.voxel[~other.is_surface]) and + np.array_equal(self.vertex[self.is_surface], other.vertex[other.is_surface]) ) def __add__(self, other): @@ -502,29 +494,29 @@ def __add__(self, other): """ if not isinstance(other, BrainModel): return NotImplemented - if self.affine is None: - affine, shape = other.affine, other.volume_shape - else: - affine, shape = self.affine, self.volume_shape - if other.affine is not None and ( - (other.affine != affine).all() or - other.volume_shape != shape - ): - raise ValueError("Trying to concatenate two BrainModels defined " + - "in a different brain volume") - nvertices = dict(self.nvertices) - for name, value in other.nvertices.items(): - if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two BrainModels with inconsistent " + - "number of vertices for %s" % name) - nvertices[name] = value - return self.__class__( - np.append(self.name, other.name), - np.concatenate((self.voxel, other.voxel), 0), - np.append(self.vertex, other.vertex), - affine, shape, nvertices - ) - return NotImplemented + if self.affine is None: + affine, shape = other.affine, other.volume_shape + else: + affine, shape = self.affine, self.volume_shape + if other.affine is not None and ( + not np.allclose(other.affine, affine) or + other.volume_shape != shape + ): + raise ValueError("Trying to concatenate two BrainModels defined " + + "in a different brain volume") + + nvertices = dict(self.nvertices) + for name, value in other.nvertices.items(): + if name in nvertices.keys() and nvertices[name] != value: + raise ValueError("Trying to concatenate two BrainModels with inconsistent " + + "number of vertices for %s" % name) + nvertices[name] = value + return self.__class__( + np.append(self.name, other.name), + np.concatenate((self.voxel, 
other.voxel), 0), + np.append(self.vertex, other.vertex), + affine, shape, nvertices + ) def __getitem__(self, item): """ @@ -548,7 +540,7 @@ def __getitem__(self, item): if isinstance(item, string_types): raise IndexError("Can not index an Axis with a string (except for Parcels)") return self.__class__(self.name[item], self.voxel[item], self.vertex[item], - self.affine, self.volume_shape, self.nvertices) + self.affine, self.volume_shape, self.nvertices) def get_element(self, index): """ From bc2064eb08c3208cd9542f521f6c4d89c80c520e Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 19 Mar 2019 22:03:15 +0000 Subject: [PATCH 030/689] RF: remove spurious ', ' from method definition --- nibabel/cifti2/cifti2_axes.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index a1c0de7165..357a6db910 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -416,7 +416,7 @@ def is_surface(self): _affine = None @property - def affine(self, ): + def affine(self): """ Affine of the volumetric image in which the greyordinate voxels were defined """ @@ -433,7 +433,7 @@ def affine(self, value): _volume_shape = None @property - def volume_shape(self, ): + def volume_shape(self): """ Shape of the volumetric image in which the greyordinate voxels were defined """ @@ -452,7 +452,7 @@ def volume_shape(self, value): _name = None @property - def name(self, ): + def name(self): """The brain structure to which the voxel/vertices of belong """ return self._name @@ -724,7 +724,7 @@ def to_mapping(self, dim): _affine = None @property - def affine(self, ): + def affine(self): """ Affine of the volumetric image in which the greyordinate voxels were defined """ @@ -741,7 +741,7 @@ def affine(self, value): _volume_shape = None @property - def volume_shape(self, ): + def volume_shape(self): """ Shape of the volumetric image in which the greyordinate voxels were defined """ @@ -755,7 +755,7 @@ def volume_shape(self, value): raise ValueError("Volume shape should be a tuple of length 3") self._volume_shape = value - def __len__(self, ): + def __len__(self): return self.name.size def __eq__(self, other): @@ -919,7 +919,7 @@ def to_mapping(self, dim): mim.append(named_map) return mim - def __len__(self, ): + def __len__(self): return self.name.size def __eq__(self, other): @@ -1059,7 +1059,7 @@ def to_mapping(self, dim): mim.append(named_map) return mim - def __len__(self, ): + def __len__(self): return self.name.size def __eq__(self, other): @@ -1166,7 +1166,7 @@ def __init__(self, start, step, size, unit="SECOND"): self.size = size @property - def time(self, ): + def time(self): return np.arange(self.size) * self.step + self.start @classmethod @@ -1210,7 +1210,7 @@ def to_mapping(self, dim): _unit = None @property - def unit(self, ): + def unit(self): return self._unit @unit.setter From 4c0f389130ad96ea61e72a27f0ca61d77d917c5f Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 19 Mar 2019 22:06:21 +0000 Subject: [PATCH 031/689] Replaced asarray with asanyarray Did not actually test array subclasses to see if it leads to errors down the line... 
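For reference, the behavioural difference relied on here: np.asarray always
returns a base ndarray, while np.asanyarray passes ndarray subclasses through
unchanged. A minimal sketch of the distinction (illustrative only, not part of
this patch; np.matrix stands in for any ndarray subclass):

    import numpy as np

    mat = np.matrix([[1, 2, 3]])     # an ndarray subclass
    print(type(np.asarray(mat)))     # <class 'numpy.ndarray'>: subclass stripped
    print(type(np.asanyarray(mat)))  # <class 'numpy.matrix'>: subclass preserved

    # With a dtype argument (as in the diff below), an input that already
    # matches passes through without copying; a mismatch forces a converting copy.
    arr = np.arange(3, dtype=int)
    print(np.asanyarray(arr, dtype=int) is arr)  # True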
--- nibabel/cifti2/cifti2_axes.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 357a6db910..92cd0203b7 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -153,16 +153,16 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, self.voxel = np.full((nelements, 3), fill_value=-1, dtype=int) else: nelements = len(voxel) - self.voxel = np.asarray(voxel, dtype=int) + self.voxel = np.asanyarray(voxel, dtype=int) if vertex is None: self.vertex = np.full(nelements, fill_value=-1, dtype=int) else: - self.vertex = np.asarray(vertex, dtype=int) + self.vertex = np.asanyarray(vertex, dtype=int) if isinstance(name, string_types): name = [self.to_cifti_brain_structure_name(name)] * self.vertex.size - self.name = np.asarray(name, dtype='U') + self.name = np.asanyarray(name, dtype='U') if nvertices is None: self.nvertices = {} @@ -214,7 +214,9 @@ def from_mask(cls, mask, name='other', affine=None): """ if affine is None: affine = np.eye(4) - if np.asarray(affine).shape != (4, 4): + else: + affine = np.asanyarray(affine) + if affine.shape != (4, 4): raise ValueError("Affine transformation should be a 4x4 array or None, not %r" % affine) if mask.ndim == 1: return cls.from_surface(np.where(mask != 0)[0], mask.size, name=name) @@ -425,7 +427,7 @@ def affine(self): @affine.setter def affine(self, value): if value is not None: - value = np.asarray(value) + value = np.asanyarray(value) if value.shape != (4, 4): raise ValueError('Affine transformation should be a 4x4 array') self._affine = value @@ -592,9 +594,9 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert nvertices : dict[String -> int] maps names of surface elements to integers (not needed for volumetric CIFTI files) """ - self.name = np.asarray(name, dtype='U') - self.voxels = np.asarray(voxels, dtype='object') - self.vertices = np.asarray(vertices, dtype='object') + self.name = np.asanyarray(name, dtype='U') + self.voxels = np.asanyarray(voxels, dtype='object') + self.vertices = np.asanyarray(vertices, dtype='object') self.affine = affine self.volume_shape = volume_shape if nvertices is None: @@ -733,7 +735,7 @@ def affine(self): @affine.setter def affine(self, value): if value is not None: - value = np.asarray(value) + value = np.asanyarray(value) if value.shape != (4, 4): raise ValueError('Affine transformation should be a 4x4 array') self._affine = value @@ -872,10 +874,10 @@ def __init__(self, name, meta=None): (N, ) object array with a dictionary of metadata for each row/column. 
Defaults to empty dictionary """ - self.name = np.asarray(name, dtype='U') + self.name = np.asanyarray(name, dtype='U') if meta is None: meta = [{} for _ in range(self.name.size)] - self.meta = np.asarray(meta, dtype='object') + self.meta = np.asanyarray(meta, dtype='object') for check_name in ('name', 'meta'): if getattr(self, check_name).shape != (self.size, ): @@ -1003,13 +1005,13 @@ def __init__(self, name, label, meta=None): meta : np.ndarray (N, ) object array with a dictionary of metadata for each row/column """ - self.name = np.asarray(name, dtype='U') + self.name = np.asanyarray(name, dtype='U') if isinstance(label, dict): label = [label] * self.name.size - self.label = np.asarray(label, dtype='object') + self.label = np.asanyarray(label, dtype='object') if meta is None: meta = [{} for _ in range(self.name.size)] - self.meta = np.asarray(meta, dtype='object') + self.meta = np.asanyarray(meta, dtype='object') for check_name in ('name', 'meta', 'label'): if getattr(self, check_name).shape != (self.size, ): From 72c089a7f6b03f15e9cafbca4e1c4807b916e473 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 19 Mar 2019 22:10:28 +0000 Subject: [PATCH 032/689] reverse guard for incorrect type when concatenating parcels --- nibabel/cifti2/cifti2_axes.py | 48 +++++++++++++++++------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 92cd0203b7..7a1e45fa66 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -788,36 +788,36 @@ def __add__(self, other): Parameters ---------- - other : Parcel + other : Parcels parcel to be appended to the current one Returns ------- Parcel """ - if type(self) == type(other): - if self.affine is None: - affine, shape = other.affine, other.volume_shape - else: - affine, shape = self.affine, self.volume_shape - if other.affine is not None and ((other.affine != affine).all() or - other.volume_shape != shape): - raise ValueError("Trying to concatenate two Parcels defined " + - "in a different brain volume") - nvertices = dict(self.nvertices) - for name, value in other.nvertices.items(): - if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two Parcels with inconsistent " + - "number of vertices for %s" - % name) - nvertices[name] = value - return type(self)( - np.append(self.name, other.name), - np.append(self.voxels, other.voxels), - np.append(self.vertices, other.vertices), - affine, shape, nvertices - ) - return NotImplemented + if not isinstance(other, Parcels): + return NotImplemented + if self.affine is None: + affine, shape = other.affine, other.volume_shape + else: + affine, shape = self.affine, self.volume_shape + if other.affine is not None and ((other.affine != affine).all() or + other.volume_shape != shape): + raise ValueError("Trying to concatenate two Parcels defined " + + "in a different brain volume") + nvertices = dict(self.nvertices) + for name, value in other.nvertices.items(): + if name in nvertices.keys() and nvertices[name] != value: + raise ValueError("Trying to concatenate two Parcels with inconsistent " + + "number of vertices for %s" + % name) + nvertices[name] = value + return type(self)( + np.append(self.name, other.name), + np.append(self.voxels, other.voxels), + np.append(self.vertices, other.vertices), + affine, shape, nvertices + ) def __getitem__(self, item): """ From e8bcaba0195c6040075b2f48fb8805276936877e Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: 
Tue, 19 Mar 2019 22:12:34 +0000 Subject: [PATCH 033/689] Replaces type(self) with self.__class__ --- nibabel/cifti2/cifti2_axes.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 7a1e45fa66..b3cfd80023 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -761,7 +761,7 @@ def __len__(self): return self.name.size def __eq__(self, other): - if (type(self) != type(other) or len(self) != len(other) or + if (self.__class__ != other.__class__ or len(self) != len(other) or (self.name != other.name).all() or self.nvertices != other.nvertices or any((vox1 != vox2).any() for vox1, vox2 in zip(self.voxels, other.voxels))): return False @@ -812,7 +812,7 @@ def __add__(self, other): "number of vertices for %s" % name) nvertices[name] = value - return type(self)( + return self.__class__( np.append(self.name, other.name), np.append(self.voxels, other.voxels), np.append(self.vertices, other.vertices), @@ -964,7 +964,7 @@ def __add__(self, other): def __getitem__(self, item): if isinstance(item, integer_types): return self.get_element(item) - return type(self)(self.name[item], self.meta[item]) + return self.__class__(self.name[item], self.meta[item]) def get_element(self, index): """ @@ -1109,7 +1109,7 @@ def __add__(self, other): def __getitem__(self, item): if isinstance(item, integer_types): return self.get_element(item) - return type(self)(self.name[item], self.label[item], self.meta[item]) + return self.__class__(self.name[item], self.label[item], self.meta[item]) def get_element(self, index): """ From ac2618b1d3bf46e151e98a11a47161e55224a1bd Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 19 Mar 2019 23:24:43 +0000 Subject: [PATCH 034/689] Tests many more fail conditions and edge cases --- nibabel/cifti2/cifti2_axes.py | 32 +++++-- nibabel/cifti2/tests/test_axes.py | 142 ++++++++++++++++++++++++++++++ 2 files changed, 165 insertions(+), 9 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index b3cfd80023..e2ed0b68cf 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -167,7 +167,8 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, if nvertices is None: self.nvertices = {} else: - self.nvertices = dict(nvertices) + self.nvertices = {self.to_cifti_brain_structure_name(name): number + for name, number in nvertices.items()} for name in list(self.nvertices.keys()): if name not in self.name: @@ -178,6 +179,9 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, self.affine = None self.volume_shape = None else: + if affine is None or volume_shape is None: + raise ValueError("Affine and volume shape should be defined " + + "for BrainModel containing voxels") self.affine = affine self.volume_shape = volume_shape @@ -189,7 +193,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, for check_name in ('name', 'voxel', 'vertex'): shape = (self.size, 3) if check_name == 'voxel' else (self.size, ) if getattr(self, check_name).shape != shape: - raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + raise ValueError("Input {} has incorrect shape ({}) for BrainModel axis".format( check_name, getattr(self, check_name).shape)) @classmethod @@ -577,14 +581,20 @@ class Parcels(Axis): def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ + Use of this constructor is not recommended. 
New Parcels axes can be constructed more easily + from a sequence of BrainModel axes using :py:meth:`~Parcels.from_brain_models` + Parameters ---------- name : np.ndarray (N, ) string array with the parcel names voxels : np.ndarray - (N, ) object array each containing a sequence of voxels + (N, ) object array each containing a sequence of voxels. + For each parcel the voxels are represented by a (M, 3) index array vertices : np.ndarray - (N, ) object array each containing a sequence of vertices + (N, ) object array each containing a sequence of vertices. + For each parcel the vertices are represented by a mapping from brain structure name to + (M, ) index array affine : np.ndarray (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) @@ -602,7 +612,8 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert if nvertices is None: self.nvertices = {} else: - self.nvertices = dict(nvertices) + self.nvertices = {BrainModel.to_cifti_brain_structure_name(name): number + for name, number in nvertices.items()} for check_name in ('name', 'voxels', 'vertices'): if getattr(self, check_name).shape != (self.size, ): @@ -623,11 +634,12 @@ def from_brain_models(cls, named_brain_models): ------- Parcels """ + nparcels = len(named_brain_models) affine = None volume_shape = None all_names = [] - all_voxels = [] - all_vertices = [] + all_voxels = np.zeros(nparcels, dtype='object') + all_vertices = np.zeros(nparcels, dtype='object') nvertices = {} for idx_parcel, (parcel_name, bm) in enumerate(named_brain_models): all_names.append(parcel_name) @@ -641,7 +653,7 @@ def from_brain_models(cls, named_brain_models): if (affine != bm.affine).any() or (volume_shape != bm.volume_shape): raise ValueError("Can not combine brain models defined in different " + "volumes into a single Parcel axis") - all_voxels.append(voxels) + all_voxels[idx_parcel] = voxels vertices = {} for name, _, bm_part in bm.iter_structures(): @@ -651,7 +663,7 @@ def from_brain_models(cls, named_brain_models): "vertices for surface structure %s" % name) nvertices[name] = bm.nvertices[name] vertices[name] = bm_part.vertex - all_vertices.append(vertices) + all_vertices[idx_parcel] = vertices return Parcels(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) @classmethod @@ -755,6 +767,8 @@ def volume_shape(self, value): value = tuple(value) if len(value) != 3: raise ValueError("Volume shape should be a tuple of length 3") + if not all(isinstance(v, integer_types) for v in value): + raise ValueError("All elements of the volume shape should be integers") self._volume_shape = value def __len__(self): diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 4f7c9f1dea..c38b5401f5 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -152,6 +152,101 @@ def test_brain_models(): assert len(structures) == 4 assert len(structures[-1][2]) == 8 + # break brain model + bmt.affine = np.eye(4) + with assert_raises(ValueError): + bmt.affine = np.eye(3) + with assert_raises(ValueError): + bmt.affine = np.eye(4).flatten() + + bmt.volume_shape = (5, 3, 1) + with assert_raises(ValueError): + bmt.volume_shape = (5., 3, 1) + with assert_raises(ValueError): + bmt.volume_shape = (5, 3, 1, 4) + + with assert_raises(IndexError): + bmt['thalamus_left'] + + # Test the constructor + bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + print(bm_vox.name) + 
assert np.all(bm_vox.name == np.full(5, 'CIFTI_STRUCTURE_THALAMUS_LEFT')) + assert np.all(bm_vox.vertex == np.full(5, -1)) + assert np.all(bm_vox.voxel == np.full((5, 3), 1)) + with assert_raises(ValueError): + # no volume shape + axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4)) + with assert_raises(ValueError): + # no affine + axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), volume_shape=(2, 3, 4)) + with assert_raises(ValueError): + # incorrect name + axes.BrainModel('random_name', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + with assert_raises(ValueError): + # negative voxel indices + axes.BrainModel('thalamus_left', voxel=-np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + with assert_raises(ValueError): + # no voxels or vertices + axes.BrainModel('thalamus_left', affine=np.eye(4), volume_shape=(2, 3, 4)) + with assert_raises(ValueError): + # incorrect voxel shape + axes.BrainModel('thalamus_left', voxel=np.ones((5, 2), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + + bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + assert np.all(bm_vertex.name == np.full(5, 'CIFTI_STRUCTURE_CORTEX_LEFT')) + assert np.all(bm_vertex.vertex == np.full(5, 1)) + assert np.all(bm_vertex.voxel == np.full((5, 3), -1)) + with assert_raises(ValueError): + axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int)) + with assert_raises(ValueError): + axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_right': 20}) + with assert_raises(ValueError): + axes.BrainModel('cortex_left', vertex=-np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + + # test from_mask errors + with assert_raises(ValueError): + # affine should be 4x4 matrix + axes.BrainModel.from_mask(np.arange(5) > 2, affine=np.ones(5)) + with assert_raises(ValueError): + # only 1D or 3D masks accepted + axes.BrainModel.from_mask(np.ones((5, 3))) + + # tests error in adding together or combining as Parcels + bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(2, 3, 4)) + bm_vox + bm_vox + assert (bm_vertex + bm_vox)[:bm_vertex.size] == bm_vertex + assert (bm_vox + bm_vertex)[:bm_vox.size] == bm_vox + for bm_added in (bm_vox + bm_vertex, bm_vertex + bm_vox): + assert bm_added.nvertices == bm_vertex.nvertices + assert np.all(bm_added.affine == bm_vox.affine) + assert bm_added.volume_shape == bm_vox.volume_shape + + axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_vox)]) + with assert_raises(Exception): + bm_vox + get_label() + + bm_other_shape = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(4, 3, 4)) + with assert_raises(ValueError): + bm_vox + bm_other_shape + with assert_raises(ValueError): + axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_other_shape)]) + bm_other_affine = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4) * 2, volume_shape=(2, 3, 4)) + with assert_raises(ValueError): + bm_vox + bm_other_affine + with assert_raises(ValueError): + axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_other_affine)]) + + bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + bm_other_number = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 30}) + with assert_raises(ValueError): + bm_vertex + 
bm_other_number + with assert_raises(ValueError): + axes.Parcels.from_brain_models([('a', bm_vertex), ('b', bm_other_number)]) + def test_parcels(): """ @@ -159,13 +254,16 @@ def test_parcels(): """ prc = get_parcels() assert isinstance(prc, axes.Parcels) + assert prc[0] == ('mixed', ) + prc['mixed'] assert prc['mixed'][0].shape == (3, 3) assert len(prc['mixed'][1]) == 1 assert prc['mixed'][1]['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (3, ) + assert prc[1] == ('volume', ) + prc['volume'] assert prc['volume'][0].shape == (4, 3) assert len(prc['volume'][1]) == 0 + assert prc[2] == ('surface', ) + prc['surface'] assert prc['surface'][0].shape == (0, 3) assert len(prc['surface'][1]) == 1 assert prc['surface'][1]['CIFTI_STRUCTURE_CORTEX'].shape == (4, ) @@ -182,6 +280,45 @@ def test_parcels(): assert len(prc2[3:]['mixed'][1]) == 1 assert prc2[3:]['mixed'][1]['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (3, ) + with assert_raises(IndexError): + prc['non_existent'] + + prc['surface'] + with assert_raises(IndexError): + # parcel exists twice + prc2['surface'] + + # break parcels + prc.affine = np.eye(4) + with assert_raises(ValueError): + prc.affine = np.eye(3) + with assert_raises(ValueError): + prc.affine = np.eye(4).flatten() + + prc.volume_shape = (5, 3, 1) + with assert_raises(ValueError): + prc.volume_shape = (5., 3, 1) + with assert_raises(ValueError): + prc.volume_shape = (5, 3, 1, 4) + + # break adding of parcels + with assert_raises(Exception): + prc + get_label() + + prc = get_parcels() + other_prc = get_parcels() + prc + other_prc + + other_prc = get_parcels() + other_prc.affine = np.eye(4) * 2 + with assert_raises(ValueError): + prc + other_prc + + other_prc = get_parcels() + other_prc.volume_shape = (20, 3, 4) + with assert_raises(ValueError): + prc + other_prc + def test_scalar(): """ @@ -230,7 +367,12 @@ def test_series(): assert sr[1].unit == 'SECOND' assert sr[2].unit == 'SECOND' assert sr[3].unit == 'HERTZ' + sr[0].unit = 'hertz' + assert sr[0].unit == 'HERTZ' + with assert_raises(ValueError): + sr[0].unit = 'non_existent' + sr = list(get_series()) assert (sr[0].time == np.arange(4) * 10 + 3).all() assert (sr[1].time == np.arange(3) * 10 + 8).all() assert (sr[2].time == np.arange(4) * 2 + 3).all() From e00143b2eebe8faeca772587f8e11701fb8364db Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 19 Mar 2019 20:55:46 -0400 Subject: [PATCH 035/689] MNT: Bump minimum numpy version to 1.8 --- .travis.yml | 8 ++++---- doc/source/installation.rst | 2 +- nibabel/info.py | 2 +- requirements.txt | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 367a105045..fe2cdf0b97 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,11 +33,11 @@ matrix: # Absolute minimum dependencies - python: 2.7 env: - - DEPENDS="numpy==1.7.1" + - DEPENDS="numpy==1.8" # Absolute minimum dependencies - python: 2.7 env: - - DEPENDS="numpy==1.7.1" + - DEPENDS="numpy==1.8" - CHECK_TYPE="import" # Absolute minimum dependencies plus oldest MPL # Check these against: @@ -46,11 +46,11 @@ matrix: # requirements.txt - python: 2.7 env: - - DEPENDS="numpy==1.7.1 matplotlib==1.3.1" + - DEPENDS="numpy==1.8 matplotlib==1.3.1" # Minimum pydicom dependency - python: 2.7 env: - - DEPENDS="numpy==1.7.1 pydicom==0.9.9 pillow==2.6" + - DEPENDS="numpy==1.8 pydicom==0.9.9 pillow==2.6" # pydicom master branch - python: 3.5 env: diff --git a/doc/source/installation.rst b/doc/source/installation.rst index ec942bd043..c853de9619 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -87,7 +87,7 @@ Requirements .travis.yml * Python_ 2.7, or >= 3.4 -* NumPy_ 1.7.1 or greater +* NumPy_ 1.8 or greater * Six_ 1.3 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) diff --git a/nibabel/info.py b/nibabel/info.py index abe71735cd..36437ff5b9 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -186,7 +186,7 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): # doc/source/installation.rst # requirements.txt # .travis.yml -NUMPY_MIN_VERSION = '1.7.1' +NUMPY_MIN_VERSION = '1.8' PYDICOM_MIN_VERSION = '0.9.9' SIX_MIN_VERSION = '1.3' diff --git a/requirements.txt b/requirements.txt index 061fa37bef..6299333665 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,4 @@ # doc/source/installation.rst six>=1.3 -numpy>=1.7.1 +numpy>=1.8 From 57a70408de870bce85085823522b27362851696f Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 21 Mar 2019 10:32:12 -0400 Subject: [PATCH 036/689] TEST: Add de-identified, compressed DICOM with empty string in SliceThickness field --- .../tests/data/slicethickness_empty_string.dcm | Bin 0 -> 41726 bytes nibabel/nicom/tests/test_dicomwrappers.py | 6 ++++++ 2 files changed, 6 insertions(+) create mode 100644 nibabel/nicom/tests/data/slicethickness_empty_string.dcm diff --git a/nibabel/nicom/tests/data/slicethickness_empty_string.dcm b/nibabel/nicom/tests/data/slicethickness_empty_string.dcm new file mode 100644 index 0000000000000000000000000000000000000000..c2718cfdebe5f9c3b72fddb79e2cebadcc3dae7e GIT binary patch literal 41726 zcmeFa2RL2b);7HL-aEn8JA3QB_ufTG^xiuWov1+&Aq3HhXo*gg1kpnxdX4CU=q3DX z^W-_tbH4Mw|2gk}eb;w=|7BTa%sI!LbIf~=F=kmTAkhEWLsm&f4IU6dPZbd~06gJA za3Bq7*b_M5;DM)}5;+LU$;~Oq3*m%9AP_-LK29hnFK_{OARPhV5Ck}QIHBAC8x#r= z064fgd3iYb`60Z#oIF4#2n5Opg>dsg`FMGu0(`te*HHoj0`MSQAV2Xx)c~uHms1D= z55fmh?_8&H3-EJ7I0d+Q1R+oePy+<0k?&dp0+5a(GDt&5Nn7bIJct-1C5H%7kd;wG z1tEcCbkIRcIvS=*+F(F2`UJJ`_FmBff!7XD9{UFZF3h}YjL1F$!jiIYd3QbFK25PFF*!dO?6a| z`5$$nf|39ZWRRwZy{EG|z>lY+1p-|G$K@4h0ro@%Y2y9A^_(8S@jumRe#qq&)&k6Y zF0W>eE5C)m=}+06qW>|I^JzV=>rU}a4?1u(Y?``=PL%)RVg z!B9a?K`0;CSWQk=Mnes(EvF-;rl~3?dk+;vl>EQ84Fm_X1t>R^hg$#@1P_D|L4YX$ z$RWb1Y6ybly`7xEHuhH5PWE2@V0&kCTWfZ(x65ydtgXPV9$+UxP^Z5NYHjt;ZNdfY zR#W}2J&OzI>)(427X-Q1NT`qygbM=Y!UahJnu7>Z=aK>g`EjKH8t?;#Ku%m)fQE8H zfOyECG%1J(;70~YYfCAqgGqt%VS1;aO$t~7EM7;`L_VN<%4FR6;AZ~6_KwDvQz#JDT zDEvAW3h+e$6gq$@4<|PtAHN__LnKfxJP4{t3VH;17k~n|qOSRQ04fHE16sq+4bV^i zOi#H^7l4q0Qg0|eQqWU?0yqU>a)GY(7BEsMtTZqX!UlQzyV$yd)wIE~P##$z7G@*( zAYBc4Qyy7UXKyDjdk#-0a|>8*d|4@cz*j(oAUqIWlivi80*9}w26#|z9>8^S!rU$d zV)C~Tz@vo^(-Ub|D}PcDE`c6^CI}P)*9Y85LHGo)KGW7P0Qd+AU`Rm)ft~?k;X$xo zAqL`rUgP8z;Nj!tgYXLp@bGZ+l7dLDd9Pch1uH>OUI$DDXbr3dzdZ)fGDSHlz#9W8 zsJJpZn4oLt3s%=t1M>oLXh3^!^a2dR`oo(7;|^@-f#GyL2EiUTf$PVAaRY;}|3Bu( zz3Fo>7=(AzzYqvO)C-1HH-$;^*T8ltmE2Bgic%z%OLNApk@Geq4Zu zpN|_V$Zx{I&&g}e0fF#=K?2t~I3S$-eEi(p{6hQ?Zb5DV6JWRlAm!)b;|HRI1cXc= z*FPLkFi0C{4ex(9>HO6sy2 z22him=e0C6Fuj97SgtjR_#c{d;~sCciJJwG^>2E1ld7SizydP6)+xZ@fmuMPc-QUy zGZhTvM8kvC3;+hcu|9o8C`1b2#l?foi%QTyz=$TNDT01el?XZ*IOLU76TDK!6hr>l#=hGn2EmmA9w02NTP6==#9~ z_`&O(zn^k`mUgi5&%)K*!-|RJFICFQ>Bwj+X6uO&kSJ) zGqV7hU~B^aJsSj=t#R}4@(A#;g8^S4#4W@xctb;emmqXgf)Z@Z(eSbMaQa;XtQK}K zfEF02@aCkyiG#)e?Vv(zCyVzdCE+E7XhC+FP z8w5ZUa2v27AQBYH&kh#g`5l75qF@|6z{SVS4i*BQyg*()AS)k`$oD%Q76)YG1F{1X zVSoTiyb0g2Zo!V>?;e3J;v2}B6}t^#1b5f~;l zEE_E4rcywHKby__CuDAR_LlM<=FZkqZf;I8t}b5Y_Ab^QH|ht=aIJw*?mzkU^_67p zErHpQi@C>*mI8VK<$);+jNpY4fYcm7_5ZUARsaX|55B`Wd{S=3`+!n11R8xW&KkvZ>ZlnZc63nNtms?0s z=x+xsl24HTZ*c(dz={o~LqI8il=zo&{vp8spv0RN!tCykw*FZf$F-|k(E!7PY5uPKirMr-!KPe^Uew$uHJ6{74X+a@~1Gr5r+VE-XIR#VX6TE zv>(elT-}{9#sqGp%bw1Ue9w1!jY=@Uu}xT?UF!h^=xIPRU>X4ccB3gkJ-;>bHw8R@pZ|+t zUTgbJ1{nMQal>R51iJBB5g`D<5A-(Jx*J`+v6Md~fC>4RfemJ- ze;wk0&iHGM{|yaP0o1?^IE~*E8`$%Deg;D0zMkv-b&aa^U#>s2{>A!3>o4mMEf5AU zZ-ni}aD!QS1bMYUm_Qg4B=4#Z2G+N%zy;ObUNXOh(T*lRd znS&S578WfK0l$0VwT)?813$~~QM}s8c zZuTT#%bovx{Sysr>HV*}1+aYy@&DPr1TV0b6abbsz|O+|$$f}_UcF$0G^JGKbamyx zG8(EH+Q3TUX8j@wtT}GhFEHw6m4XUTH>(sFb+bxA0jPhhQsD6PfPDsFX#~3F_*)3! zZy`8*4Fh1g3~&OwrU-~g$SA02=opw-*f_X&_ymMR#3ZC-

)(si?s;v~=_gj7-cd ztZeKYoWRBiFmLAL7Z4N@77-N_mync_mXVc{S5Q<^R#8<`*U;3`*3s3|H@ItPWNdQp zzNwkHg{76Xjjf%%gQJtPi>sTvho_gfkFTHqgMf#DLBS!RVc`*xkD{VuV&mc;CnP34 zNlr<9n)WO`<9TM*i|m}-mwEXGg+;|BrDde~9o=GOL)o!z~k`v-?d z$0w&}=NG>&uK*-(vG@}Ugj+2Bz+xd7_xjDk@4Z;SQh*&&(847Gus^GDr3GSv#Qd1n6(5t z!MGXzp#(S?yXMn9Q)JY3R@9nibY#;=cX$|Nwiq3$$;XvXUl+2DoQqlngYycR)w_E9 z1hgPJ?>9&j-+JJb*ok~6HZ!{4TYtq6iw9{l#9Z>v)8lfCaHKm7i%pSY*;11?+O$*A|^YtLamII%1y|$5R1+;+(9hODK6O^U(=(~h{fzlI zFDg7hdKxRVRF#C~%r$tmC;?Zg#LEjM+&}N@3+j`hQK4e-hv6?PnlNhwdOD9gWKi=F z_N~<7!gsw!16Z}#gqYH=7zx-|l#mhO!}o<=hEYyzpnJWz;QV6u`oY`q2@%w#cX~D# z4Z-()Xn*2o<<@i(goikOaM8nQq0JTy-K@6Iu8wk~u9%94+{w(+U%mpp3m&#H(>F*-`?lDpzBa|>SbGHmClaCs9u zB_zsF{CGblpl})6bg{k&X`ZJEAKteIr!c5eIS|2P`;NVKjD<2^lrA4)UX)ZVXmFRy zk*Yq#CWNQfBWqk~?0FJix30Ktwc-3^zk>Q?mOUZj?6-+;b}!5VRJWin?{s=_Wjxl8 zc^)s3^}ZCPR2|394dl^cDo!PKhdR5R|NaBq_+UIXy7Qq!(h;>ePhdI`|@uwm1M{J6kY+p_Oeoykx?-+pT z+aEiQAhX|lk03LmKXzO}X0*T_9~DT)-WqrjwJ?0Et&v#$3fVR1Jz0KgxZfgsnTxgyLt+#a_G+#+y` zz%2r|2;3rYi@+@cw+P%KaErh#0=Edx+LcL&)wOymW-ve~%f)6^@Tz0Q^?+zkMv@;}w|Y{M+mysmZv7|3P~}`*#-7 z{}X=Tw`~8j1NMIC_P9mh7J*v?ZV|Xe;1+>f1a1+yMc@{JTLf+qxJBR=fm;M_5x7O* z7J*v?ZV|Xe;1+>f1a1+yMc@{JTLf+qxJBR=fm;M_5x7O*7J*v?{%=6w1o)$Roc};j z;Oncv7cKt%<3Hwy!@ct#l@EtY|G$}E_CGBD|91ZW-FW_|{fqb?;{Sg)e=zQUCjb9t zd;CxR!@uttZaHrexJBR=fm;M_5x7O*7J>i2A@E;)wML3jw}I23)_7e+t8) z{mZYGY2LkS_Sf@2{A!tK$Msjs-Z0~7>e}FlG$7gtG?bN1rL$T%5W#ivcDQGoJ!!DH zBPafP^EqO)u4-7h;>zbUvJ!erQEKyFcz453gav$Yt!#N++&a6s!wlco5|}Lc3h?I{ zJg}gkLJ3PY+=}sfqOFc&dtoQ@RJ?J*yL(~pIaS6wCbIEplzE{*R%=*9nT{*fc9tK?-Hs1dH zsObx4gYwr5nlo6P(XsA@v#0R&3*Kx(&5sSNSEAugOai{g&4#_)uNFd>a@?z0d7WJ} zf@gq0oF`ZKqKbdU76D&xoH(brH0UCPK}@6>&(Z(f{7id$F@xCe3TxwBG4a@x_^9Pu zMxF%*?&Q$YYP2S?Q@UM|-n>g;M~nlzt&goWf?Wyk{n4nYFQ;%Pm{IoG&y-n;L2n7d zlJ6BCW$_Y1P6SLEO*?IZ(1bN4e zmH&b;tUIiFsD`*oc@$oDe8!u2HHEb|GbB0vC4CM1*VoCJ@o@cJV&29bPUgG&E5hHl z62>0x@Ev5jy?-i0`K7+&=UygQUli}OyJhtW}$yS8H0W~~QCS}ROq+Kl) zWSDg(?=2sKqyv$4U#hXh6j&f4T@Pw>H0P>;u|0f4~t?8CYDxxzm9{`Yt*?#>K6k4&tpD~2ah44EsXub_d8agekDn|y}g#-h|)Y-AY7|xNx`}I9UhWdVk zP*xhuOoeCj+n=znKpMYnA)5s1t``H?K{85u2qHm!+`V_tRl^D@yK4G!sMQyUr6#3Y zHFGvS1oQJ*?ra0CZ8v{A4#w`0mk@VV(M{=R46vhY+z^_CBCF^OuYET=u%`$?`APQ6 zxh~|xwwBcU^;jd`M3-@%bi&;i3kn*`;{sulIC{ty_%23yTSY{Vv{9mZhihH!XFbu& zsS-3q#9m9>H?xk>B5!IooddV53%l;E+2#dDr?r1Bz?NG`=Dzc=6Kncy^L9z(;>jac z_Xj8oN;^azuG1X)+=~x*4YasRzCB5v`q_}tnIJQXFNF8_G+$Uoqt)f|D432)oxGcU zZtOE%-m(#^HvI~jYvxeK6-&m7(*wGp%o3yOq@Q`Te1g1hA2EMR+Tubo*GJ{fSYAz8M?d1KD6{M_Nz64)NO7`jA*$n6T(+5v_qewd%Sj>;g3+F@9 zqZU#T261J0dF%&R4FNe&}D+Atu zzLk$NIIk9h%nNiXe@WY(R8s`^>8p-r&TgJ7ycM*W%e5ozSj4QD1O06O634Ie9!KY8 z*Zorucwl{2rvge+72ms>TX`Kf-^clO*?YO3&SwSBLo zzq7eU5L)`VT$zzH*#3ZHv?@+(y}Q*Ei`&_GTDP5oa;Vo5cx3*@?E*=qJm$xsemJAk zBHJTB+rS{Nf`nLQEEm;uHzvvVY5~d5_&i7p4Agj%ntuG`LgfS<>#n?gvmuoF zFJjJ}OpfY>Q#2oV$*y^i;fT;j)+9IE z?``i4i(t2uKHTNpFaFL-8Pg*)R4?>-^sx_DN&}O>^6X^T>%u$iw@Ek2eh%KQSUAMyoQQ$n)tsYolABaGX~kUmQP_ zy@0E0wX1mQX@n{`h>}+xNaO@!m)aA3YqHTw`vU zM6vkoj~y3xnypLf*|^SohV#T4*^#BQ`$T7E2Amz@Nk}smUFaW3H77?aD6_kw{4qnO zaVh#iNky}cHI1={bE6XQIUFw{U0<4pDUy-JEk41e3kmn9(thGffg0AL@XX2o*#R#_ z6md&((_*HXX<9K-=@&_RdU*Gz@gD7j^U?^p!!-4K`Ckhtby(Mu16kznh|k?G4$9)Y ztZnV<+TM8JWvMN{Gq~FL9V`-3qB%(7TgS{wP?oxi1)avIH%}-~nRcmo&STRP(@BNt z{!LJn&04j*gNIC;ci`odmYq{Nt%V;Gj_%!oU(8;ln4Mu_>kx=_OC?h=C>FpEvJ#-E zRUaaIrEo0ZeI=Dm5J5L}Agv2{S>}#uP^kPRG%%&Kv0HYH;pfLFm5u=T0X%1cv{h~9 zRg-p=SDts?n>~!3S|52$a!C2LT3Ol@_<$`$LW^=KHq1}a>*yJBK1%y~?uLJbvske2 zn-BIy0=ekDpP#BlsH=w2QNC;dG4!ak{UUrvINYh;)x!9wD)7U{g~YG!Q@cK$?ow9G z3y$0q)ab%$Z{sAxAb}4?%o8dd6FJ%{A0d`mmb1ur9hWDEQbqD$edEPwlm7Z(6|7t^ zb??XM6F(sgBKx$g2^Y6}+N{{k%A6E>BUaf8Vambd(4azBiZN0F9E_%=@@_tDFS%;V 
z!?)rc#SBrJ=T}HCi#Zd>*3PTW589kCC}mZ63+bmMyCARVFL5kTuIx_>+KOrE-A@$uD=p1fZ%p4tWTt&!odU!jwzbXi~^zE=um>Qbilf?JD3 zU@%3PBA4Nxu#J`<3MuACXTSV~l()Xp-jtd_8L-IH zO{~n+sikLgkG|mP?A%)crV7RH5z3VGM16GD^Qby*3K)(@`1Fn6xJEVBD+G4R`&Wv` zx*sD1fBgD0PRvnBbJwCjcJZi@scuQx7uN>&9kI`f06NlZgcvt${5SZC180XW?Cj0; zhh7vfQf5f5z*dohm>*b{22C|D_#bXST7{pQMSHuELi!P0mkqKTGGyvgN#?!onPCvR zlAIL;Pd)t+`B4ab>GTY_iGQUp4cLGc%5~a4DHNdBX zSXPzkM;8rFUwy`d@W2bw*wK}OjTswEE6|2?3b>IR73elF5_@7U+(eO5Yp7)3rUa$ z>`f>z`N%pDJ(6mS|Mkm5zd4Tb9v+@Mne6Jl=`er1ukE;amCiqPZNFb+04mu#k&bE zY}?70J(RfibUn`@Jo=~;nt^Asp zSm5V#JuQ2+Cq=(xmCLAy<(cB5u(9~&m9OL+xA5+4ztf1Fm;XHf_J!L?!?*G2( z1kr0^G#;(A{lL1^qRIq>RWS8*!q@+%{n9d_T7NliVuySYE&NSi5T0CR#cp7 zXNX&kG>yd;yS(Bcl&3!&QQ;$x9N#1799OvKW?#FsB3{GE*Z{BL#^6#IcQQZw1{oUs zy*CFv-Dp|3Uf;880AkoV@LC{Hn2&%KyP5Rspl{LPKJ#P<)rgV88(nmDUONZQko8)0 zPYtF%POJ|$ms#XCkD12qKGtJmPZzYg5-?4fPy#u;&<)^4CP`at zL6knyswQI-IB&zbA`uT&@j{@SGbC~ z_BC|6_6IE~qc2`*gFV@P8(NvH4Ue89p>zPs>nky#@Oz~YGkk5?ol(kWRykMha=W_~5&X37fpm@e9jOPv)jN(>zGl%h%V_baLq&8Y)9P-v*VCC3o&7m@Q3M^hR(4zWX z5_r}3qDQ=<(R@kaUXsT1WKt_;>^l!52|;!CbCUbbqfSoeQz85_1cQb}e4E(Qr&hi* zlq2q{$iv&)=e`wh@Ns?)5pDZxn`ymXHZ@*{;`QcBMa&Fb`S@qObdM%*MT^qCv-$R? zw4ZybiR_)@ckWU0Gl&KJt%D$|4%UupDtIHx&%k^id)2-oU*5p+rAevvV)qplw9lY$ zw;L2|C{U)w!M$~E!q=`mLIjV<#~m=bTv$0lqtQ9!AjXW!IJg`Ca$apYnyLC!n<%$0 zP>s|uQ8rOPWatZcuB^acN;YP-ih&Y9O`FvwWHMV2)|XV z{h?YqTf>?d%J+2R{y}VT)W*3R%YfXB0K2hCS|$6N<5T7k!4YWsM_N^_pDGy67cH)2 zO>H>kqs(t;YZ|{()O$@Q>~xV%#KuKF;g`d{NB#NxNLVCYnOjf5dLThPQioWEuK+|g zBv7W-r+h^$d!3H#gXUc*1X0inWY4c(RT783SviFb> zFwd>nxQ0(2%bb_WXGT_Qw9XV*C!&Yaqdj_3yd>5W`Rh&5$P>^90e|;}9hc569*-RV z>9+!XDrprB5;IM;sE;XTkVn$qm4{$Ek$A$LIUT&4iBll_S(48{Ir@B|?Q1;?2f=u7 zRqu}E@`88a<41i}{vb%+CspmIIS6s2aY~~1?N7oVB^6KaX7AH}c!*ZKE*ApYklguR zJM!ezrm%)pgt%C9FUa9>gTcl`C{ai!QnP7G6P1S8m0PLrfJmEFlVU@|V&lCu_f)+) zfnPkqgoF9wtGFnV@G>TcO+Di>YP_S-llGQ`KjgkS_)k4LUOm#bp$^=SHP#CoOtL~! 
z6$s3gmC>ep#-{rwafE*Iuza^~!Gb>fMM2dorj`@2XG7kf;uuxj)CT7)TlmWM$UrH( zNIC}tST(hdJGhPn$iA5&Xl?KBOrGMktdeLdQ;eHBgjzfq&^@H$Z6DLF2EEj|==pSp zN{z$3VTm_xMP58*GS#;_eRg#0U`?@m zz;PLrTEDyL*N5=%3aqekkak&79kPZKFjC=0%*?2FcSpijTA=-)Y>#ucFN8F9F>SDj zDR6`SUIufy-2}qt)a|A`CfB=SnCR;XqxMo*S?Sn``PE_WLj>I;CWD_y6S5x;SPRuX zF@9!f<7(0}X}WS2HPv8)+Ai7WIozsWOgT+QK&j8gpYVWZmqKzO3~HciFoY&Z%6|`8 z>dKN+N57-Aw=O=pQL8Sxava`=(PWSRE$tHZhdi#KX`0U?JPe6DC$#A)?CxEA$AV}h zwUl$g_)RZ6OXuJ8!e7oXf5YbV!=7Cuz&TfMDPK{HXDQ6Gu#0_F4_f+&b(s|23N0$6 zQNzj3PJU;X2XatQiHVA$vixw@*wXLHn6hOc`Q&pHrlblUVaHNz4{>s3QEz&7hDCte zz4U_MAe$=-_qvFb&4!i@ zA5VWv)VgzK*ChDr@JP3Y*!s@rd$fBg9ZGk_(|(Bd)Ck}C<)=89P`x-bxbr^C&bz{S zlCzFIfw+SSxyt3+PdKYNL3;6@?&3t%Wdm;USHV|c+KOg`HoL9klm-Eu7xHIa`*qtd z4!Z6$4zUfRU7n83Q=-xoJv<0=r6KqZ7JP)SqU?%aJb?UZ_5hPKwrJeqn<>NXzx3s~!Lin~5Tn zGN>3HBb5=LGpI|wdJh?{BKnZ%K(`zA`0DPK_iip4&kHcqndA8>on$~=>Cu3$MqlI7 z%*S#HeY(>ON|q=d4_UPP^cO79el6W>%`V=!^nEuZobZa2@Q>l#jf;G2xQ{5Yq@~Y{ zD0j*B_ipX8yU3w-dLEntyU1o1FF#9jyf|CTgmA!fPDq~#~wX}Lu+ zpKz4Jy_N+chtT&-@$oy3p;sgAIUeMXMUrnSY`m1LA^yRYktQoC74=lI zTWM2!wS3KWN;9eHU7tyvt;vyz*Lmilee=Z25AN=~xt)hC;n_@=t2Hc;Sgt#h!YxxnL?r%*cS5O!~2Ca0cunQLvvO&%LDL6XiI1iI8 z9fjG=`0sz3@GokM8uI)@el7cl}RhKkx>g?twrcB2;A5Q2` zN%&84{?gc%m2k~YN^utpaT2)F(UH%0fcaac-Ozd30$x=NZE0E8eeB7shoBYu?Cg1x z2l%bw6SB%aC$ga?mP$Beh#rROv@t&A{iuvA*kU;P)w2rT9%cwxWuRL-oN3NS zP_m|Qj;PL+=+&KyFbh*MZWv%UEoFVVz)cvCc&D%a#SRg%S0eu z>$R?}*LuYk9pgJ-Q8B5=?#N@#C7-bteRnJuXq+*O4HR~*6`4bVT6~UA3oY8<#+GxKcpH~=Ee&y7qC?}QMt!6!j$zTwT zyBM>q{?A717!-v{4%-Y9YpJN+U$s$^WLq;-I_)CmOG5{FR!;Cmtu?Nvzlxg8Y4&Vu z=`(6sYmsS=(RSW7GAA=danW+oW$x~7us^UFT(B`t4M@}(5WX@lc}(Y_YRzvd zWk?c?`CQ30I#IFm8~3c>qlji-_1*3ekpCL8*K58bbt~Zll6!S(53rJ zcq^=RR}yw57S7#p^DcQ3hi19d(idBeo zi)_oWL6m$rS?R-^LqCO|t8`|Dq2WiErBg~$41JKu{2-Ui-tv;eDyP3l#j^&f1}dW+ ziblvt%|*t)5iA^K`CUbW9&Ne!%A57^IMR^$&I#4?2U%U}`Z`LPT<4tKhB^JFKcZ#( za2^G*&poKRkN#^uedx+5&MF{#%z8PFrQ=b~uwQoyFaJ`=Mx?@NYQ1F2Y0TYElwarr zuPkF*7x&mTJg@XmkfI<{yJ6#}nR{Oz+U*XeBpoAFLb@HW8OBmQ#6PK0O{a^#iu$?| zOusYvS(QxXVygW!vn!hoIEUJjpxQ)EUxBl*caa&4aY` zv1GrL=~WYF25#k=?T!;fSnFZ3NH{jKIl}$8l4MJ+cTX<%Ml+8EhRlkcRh{oC3a7h* z8v^^3*(f2hF`OGlx08M2I+tHu+?H^7$a!NI8H~ zd%sTM^q9!J5s@I_gMN85{8KX8(1#JA``YzFdWT;K*BvN*+%Qgr9uJ&je|b?yzJ8vt zPL(BWhuizXv#g(cPd;rdW}Grdmo_OOuN+tn4ee;0@JK1U_pg{RMtrbX@1ztwUZP2{ z4SV+$5p(hseY~=+h4+9WewIFFs4wq}PA>SgiFUJuQA|Wl`NFU~8_e~JI;@kHg1PhI zh%sGxAn3D%!R~T}N>eEB&dU+VbWG-h!=6jBmkvm;HyJ*rc3i}-v zTkmpDF7?~ezz50@n>G01j5lOM)U;|9dv!h*pnRz*Z76cca@(kN4V-zf`B4Ys5|K;K zI7>D!0{!WyQ*XsrDyq%%)aP$J(lk*_#qhNs61Qft3}h*DOZb_uy#=FA3kd3p^Coee zImmGjQj;FIZoPP0jN+6=`MlbgDy$XJh5BfhDz0>iHHy&^lB#X<=(JEIJT;+;bD`$f zk})*KqxAU;9@JGW-Y?ssQV$gc(YaRccQ2?f16sk`e zF?k}eF!4+yOl%oS&ML+Dnd?i_S--2=4-R)<5hGZozqfc9YKc}tj>shT!YAYdotH_5 zL~+EI?_oWGP`0ui?(P=@kv4%2nPbj=I&_jtC==&=YhoZ`4aUiUnbJD0xEwFx{pa1g z)O&mU+%*t_?xIJcF$DqSD37ojt&@cgiwX*S$$Zp)K!WsF%2$7Gnp65R3d^_G7R*hA zE>Mbzsd@3q>|qG71iVqLKZ%Y&bJHHl+?9`iTRhCk?^{`U<=c^uO@ZY*C?NP|KWhsu z=$_9D?MIN$6WS^g2_k#PWpCDo*;n*a$-WmW{PL=?3XESo(DxDQ5T)jfnO`iMrp758 z3j?9snv0Ieq}kg&AbH~Je-&|}t zl=72i%Q~u{0-@ORl|z?xOlC=LUeAaYC)1meWrnXTncw4d4OKp#c!B%4ArbRf0k>?4 z@)T7x5B!p|bm^5Fcd}NN)a3}I7ssZrHDdQJ@5CYr>adZ!&-T~}=AhCK;*-7ajunPc zR-U~%xmA4ldg7`R{>o4meaDghGH*J?36fg4ED$=Ku{FDcBhfQzogW*_cSbO`V@My8 zY+GrUrug1>%Z-2GVDiz@BPw0=HQt`CeOul; z^9^=eJyf^*Y&`UulbaELu6;74;i;$_nd%PF1VSvHJg4;RzWW-XP<@IIW`}Qt!;gOo3?7MqgF07LMNBcqF`AnWY%iIQ1 zjpOR+*E?&e&{-noT+>l6gwAADSCi4`2Q($&a$NIP$~lgEn~{80qAnMjN7m7HtYug- z_{l6NespO%y4s5()wL`5GWCo%0oIQB+p@^gBXAiI`J2LfeTlTyxUa5|VOm?BVS zvQpH~F3&4ZMoO2xnUEFRrkxq*FC>n>DLCUqwCoW=;Eo$sdt(}C@b4$Io0Yxt{tOC7 
zdiBAbfji%z{1n65*DA7TsxbQ_)R%v+|HTGcb@wFxq=ppX19nYQPEHPwRENHg`YFb$G9$q!Xt0 z2^>^XC;XV;JoI(4b8AKPG;eTtL==71Wk26eb*&>Ys|gjP%;aZRqsGXVPOV%k#93a6 z&e&~8l)sINquph~(&3C)q|KPEV%@a`J{5nE{4_z$K=M=-!Ku4)zPQ<#7wg3EzJ9ut zQ%x29Gr83A0=%#Icb^~4a;_soUq4+t7o78Y-$oJSsZ`Zvjv&0yVd~76JX|R@+H+-- zoa2yYoAB(kPBD#zsYtYl*rT=9iM!LNr7tDwDg- zymM4!i>)W_p>Np^7r(nS##V5GxMtoUwm>4w)ioE7&H1NopUnl*IDy#E2y(vu3YBv$ zNVrT-mR)An$Yo56XnsA~6W9`HLHS_Sec5%kP=zw^Ge^Iyal#`xW=y3)0mDAFA1{AR zY(kHQx7575Kg1DvQjTknn!)qa>ffJk?7)-b_=b&rRPTY!MQKiDie8yEW4czRlT(fK z45_go*-;+cW8;l$9b=VLz|UHgQLz@HWhy3#S=1uVI($wh0qhXiAXyXPQ8KUHN9O&m z|Ao;}C{^sv_vXpo`l-v31fN38ZuZvW%GR@!Utd@;nlMh3eHI)-o;hxFA3cM2gk)IA zJ8NB-pfvg;3HwKvZRFV4<+#J;qo(2gpqhg7FnBDQoc6TPRL{kR5sBH-aNCmw=YYbY zIn**a_{;K%6MmP9=X#)v;fvSCa+_WB%PYw~mr$0DBO=LP~E%3{drUgMBVvF(+y*S_CC{*Kj3_Q0*5qoVItF;P0A=(&3BpX_1;r) z`^r_;v$t|ySQkxNSs#f6s*aTATSHE;r<8!Y)G;(%vx%LL9ydU6zZPR`bANPR31m~ew_57uJx(RDskm7mcBtKmr|d{f-@BztiG^gNKCCxZ z)VLsUO)B<9ASItwsomFog-e^K!f5hkanM_S2YZLQuhdMaclKp)r;IeD&IJQg=1_8> z5M2%5a~Yqpavs;H8A+#U`A|O*^iAXy#xk}`?nP@r^OcvnZeLdeu?M039U0AR%*oHB z*|V`kB3-Q0;aRS`BXy`a{hd^52K8UrK+bEOHhHNg6kI}ZDy|=b1~Q9^r~-Vk105bn zX}`W=rjKD7eA(#K=9`IB{A;By$S9HQk{BxqjOV>FL)n}b9_5Wt*)Y78)nZK=>37fK zft)fL8~hPpDCVjSKUv9cO%pi}UI6TQZ z(~G2D+QIMvUcWxku6KV0E{~tX80kzVeRjG8|B1~RiJXypu`Ev9IS;@Q=|w|Bjkz&p5KLf+-%7?IUTLr;()H{;mb zMJ^HAN{eZ(k(hF2c~Z;jpK#*K9!?@S=`j=;t0j-e`Ag<5{wm}WE^j@h_xw{uQJwsN zAePdRnb@P?guerA+`{99=CVl_Putv ztXLlrXVN0_E*!)X86TT{u&W$q0w;FSK4!pm7oM{7-b&3yM0hJKTLY~NkNl`9o%Ig$ z>8y&iH^LxSa!BvT)~_Eqgt{&!`oDiNUX(r{_2!4xSlB2SRjKcW_=&ucvvz-^NmyC3 z5EIv^@`K)&v-nKqTb0F}CqY#XksoOS9u^a0IVlm{eCfwF0oLtsgKy!{2^7e-j`gk_ ziBgJuN^+Q-C>TVn-Z}yLi%&MWQZ5J&lMUw%Bb&f zK3RM>Dx@n_c~8mqC9;W#jBWV!6~?gd0OrBG=W4>&PmV?_+?`$0Q=AeU(LKkBz@$(wY zcGhy)1{v~m@UkoYv+LG}C0pFV-VxGmfMP)IHGN__k4c%<^IZYpnHS+!Fm z)sirLdrYEKlNVNFa%Mo;z4mIaMxUsfu&{{J0Jt^5my)>p-=T|OhN`gdin+Xr%l$Sg zRhjtoKTYf1V?)NP7)#)vk4&7eGLqoK*9drn~gLOg%&p2Vt!cP9-u8u;S7x&FWLw2W{>E%yb5a$3)J_mR06?7H3?YR{a&1?i+gr z7S5r|n3(*VMzlpaVg@-lC}oxuUO)1sUu)5jt#x*)bZmG3%6Yr4Ir$Uq?b5SkcqOhc zdGegGfmV~_4ZQOiEybJyqydbt%o@o zufiKjA(_EvsS8wW%qaahh$=vKz_e?7TOLXwG#v75&B;{DMl44w<8J~g?P9{W>syMdC=mpuH+bjG-lwUf+w`VR2RSR0;7%REo_Gu=^c`V0zY@vW6_ zF#4g8;&iXs>5L4e>@6}I%2#-=)13W9h-PD^MP4ZK@zXJzQnbKNTWMIUPhRDe=VjvW zZG6Jn!CuLR6()`{P{G<&$B_+nD zc8MmrwR|-i&d8;%f+#HmSeFsKaCnY%hR-%wXJ_f<@n^?U%&_8*6jrvEZ3c3KSBR|z z#>^x|@wJl6l{$?Z9lz*RCh?W|!@QpHad+~)dnJuN{LWRLLFztdx~y$EG8R`Z8-APX z=p9Un1BKF!&r&!abR-fbl=&Pp-Gn3H#NB^<-#vY|$f+(5LW6Mx=xn9w@ZS zwgj18qVve!3oIhL`Or?5_k%)*aLN);cWLU@@Ify3Au!?kN)o@)=_ha{U zXYjjpwiNIP&78$4?wotBSMug<$AwCBK<|i&-S`(Rp?nP`F~eycVSFWT|7_G|upE4+ z3~Nxo`JLKKUFNX{kr1G&2tMEdRKMNS1}ba%+vE-^US>dIzVXXsYF=}xE*We;)f($H=t{t9*UZ~&?9JAv9gf)Yx0Jk;y-}-F?nt@6Ba)Qo$!oI= zu>5ah75nYcN8onRS?fb*B8AAmqHQpk#!Avb*dBPHWnNDt6;=Ly^GCRJE+RojayA(p zHJdjYvUAWM(eBR;5*057nUe4ykUhmpevl(@^f}tl#;LAnbWSgSsoX@EJ{1)e%<#A| zxs>9Fo|KgJ5(0s|Jtj2>3?jPW+N+_3Z%o%tcbx)uGVYQ;_J0JNZCiJ*B{P2GEuRAG zO*?>g=}ux->c|d3!HS7P{!rN-FNDh{WpXZbMP2Pbj~m6dM`|g&+$+!qe7D9U>jnOx8IThQP&23Ojx{CnFZ`2PI6gPX{~*d0);-U6}Gy?tpp7puUF)umWG3Y>=NCx5SpRpW!&6d6K=V@0bnXHo3GPVL1KoR(WfB$__ee#@>&?CH60{;TLK5XBQ^7&j4A4ourCQNU+SsxX~tG;p7 zUJG0OBbA2Xq$-_bBj2bkLD$8%Q+k7mL|mT^H!sF#Iy|pLHU?p?{*5c}xH+SMOtbI3al*XbVYd7=K2I`juR>Y=eAN zzhVKYD|WbiDE-3K>qA6N^V885K0wYc|3PjUTrR&LLbr`)i{eTRKgXMWs!YKt96Cjx z^rTs38bIV7m88=WmVPF}TF#J^j!iNIy*s|A68{it#fRD+o8sZ$cdokarJi5Xa2F0d z7Lr2I=yg_<4#sr3j@=rl*BIWS$*cc=a~Qvet9s;z!6A2iZ7F8`wOU=j5#d$^1PSPs z=)7#?kC|^r2HkWE;O7aK$^avTyBl2$$Yy3)kro60fB%2~f7pJ-=&?K0nXI|!mwTC! 
zd-D$m@ACOF>xn|sTs%OgA3>zW12q(R9Vgkg80eenH;NXiKH>KU`+jl-ANg>~+KdO( zVfRTimtZG_=+YWA=KnK@>B2yMQBOu!?fLS;ZI95GoEpYg+xWVsS$5p&jxWyIkJc*# z%2fYK@--TeehdTuAjfb$Uas%84ns8`$@tDnTT*17)hMFNEXtid;R-M_d$)-POOKj* ziWsyYX{C>!EV3Gy{olEnq=4hg24U1)WQ;QC8*DjuKG=t61vR9xvnR)yHjS}YsHV$S z5T^s{3h~r8yh7*rt;)rV>9}PZzFI|xg%AZyq3{$@2i&B1n~i#SkVOEAOjN@SryiXv zkuI$ZVJ(oN(0Vnu;JZ3RV~yRG~U{Kw~rvVGm{jZiY= zz(UoCzf9A3QHe0TT%a*}tirE~GD7k5Ij>mQF{7KX1U@bC;ovMdfJz&{>o#@~y2+q$ z`zGKyjobE2?4cTF=!oi~XV%hznjD9)s3p=`Ql@`udC%`Am|x+C5@X$}aSMzF<5V+N z09@g9Si0dVrMG_!AGUv#C2+cYo>&m2li)f+A>T+)TNUQ8_herE0l&u-FpaAM@2Nlk ze5r4AQNT=&t+TW5XhRbMCm=SQc;Fnk=6cj5cy3!;_(NSife)cg8F4VZz}nk~LJa2% zXaGmLVS|4ml3?XL;?uQF#)^X?KbYCFAM0bVuN5Ja)!J|T1( z#}}(tNVXpBF}8)uvc!d3z_Dd?^ky1CNVSm3lEaJH2cf=Da$ud0j<%`xmxwu!&mCUf zCWsFm>~P?^UIVpEmFSNuLY{fBn8=NO)^i!cG7)#x0;)*DuI35w!}GAicZbkFZ+CLK>0 zmi4!>6?eoF+nLv==SonIW`)#goZr}}Ktq6I38vqT)o`TGw~zR<6mdn$IwW@vqu{pS zLrBU6S**Gu1$a0f|0*m-Zb+5U{zp$TPYw7ak=713L!9qYpMCzf7<^Syg)qQGrW0qn z;^wPaW0a(4bd_SwDw^|}`wAemah;pGX0Nk2_WLR&l1aC`Bg0z&jf1vRzX>bi#F42_LE?P|9}5~|JnZk-QWNK0035q BPWb=; literal 0 HcmV?d00001 diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 7e611c569d..bea88936d3 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -35,6 +35,7 @@ DATA_FILE_SLC_NORM = pjoin(IO_DATA_PATH, 'csa_slice_norm.dcm') DATA_FILE_DEC_RSCL = pjoin(IO_DATA_PATH, 'decimal_rescale.dcm') DATA_FILE_4D = pjoin(IO_DATA_PATH, '4d_multiframe_test.dcm') +DATA_FILE_EMPTY_ST = pjoin(IO_DATA_PATH, 'slicethickness_empty_string.dcm') # This affine from our converted image was shown to match our image spatially # with an image from SPM DICOM conversion. We checked the matching with SPM @@ -616,6 +617,11 @@ def test_data_real(self): assert_equal(sha1(dat_str).hexdigest(), '149323269b0af92baa7508e19ca315240f77fa8c') + @dicom_test + def test_slicethickness_fallback(self): + dw = didw.wrapper_from_file(DATA_FILE_EMPTY_ST) + assert_equal(dw.voxel_sizes[2], 1.0) + @dicom_test def test_data_fake(self): # Test algorithm for get_data From 8ec70223ad4b843fec146b957b21601bb2f33190 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 21 Mar 2019 13:00:39 -0400 Subject: [PATCH 037/689] CI: Attempt to use virtualenv on Windows --- appveyor.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 4b34c61447..769af07caf 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -21,8 +21,13 @@ install: # the parent CMD process). - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% + - python -m pip install --upgrade pip virtualenv pywin32 + - virtualenv --python=python venv + - venv\Scripts\activate + - python --version + # Install the dependencies of the project. - - python -m pip install --upgrade pip setuptools wheel + - pip install --upgrade pip setuptools>=27.0 wheel - pip install numpy scipy matplotlib h5py pydicom - pip install nose mock coverage codecov - pip install . From 9b2d6bfde61cc134c49a6c997cd7bfbb3383adae Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 21 Mar 2019 16:38:16 -0400 Subject: [PATCH 038/689] CI: Use pypiwin32 over pywin32 --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 769af07caf..ad37d1e23f 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -21,7 +21,7 @@ install: # the parent CMD process). 
- SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% - - python -m pip install --upgrade pip virtualenv pywin32 + - python -m pip install --upgrade pip virtualenv pypiwin32 - virtualenv --python=python venv - venv\Scripts\activate - python --version From 436231c94f7cf86d731e096dbb80bbe6522a702c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 22 Mar 2019 10:55:27 +0000 Subject: [PATCH 039/689] Apply suggestions from code review Co-Authored-By: MichielCottaar --- nibabel/cifti2/cifti2_axes.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e2ed0b68cf..18122904bd 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1,5 +1,5 @@ import numpy as np -from nibabel.cifti2 import cifti2 +from . import cifti2 from six import string_types, add_metaclass, integer_types from operator import xor import abc @@ -650,7 +650,7 @@ def from_brain_models(cls, named_brain_models): affine = bm.affine volume_shape = bm.volume_shape else: - if (affine != bm.affine).any() or (volume_shape != bm.volume_shape): + if not np.allclose(affine, bm.affine) or (volume_shape != bm.volume_shape): raise ValueError("Can not combine brain models defined in different " + "volumes into a single Parcel axis") all_voxels[idx_parcel] = voxels @@ -695,7 +695,7 @@ def from_mapping(cls, mim): nvoxels = 0 if parcel.voxel_indices_ijk is None else len(parcel.voxel_indices_ijk) voxels = np.zeros((nvoxels, 3), dtype='i4') if nvoxels != 0: - voxels[()] = parcel.voxel_indices_ijk + voxels[:] = parcel.voxel_indices_ijk vertices = {} for vertex in parcel.vertices: name = vertex.brain_structure @@ -710,7 +710,7 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the Parsel to a MatrixIndicesMap for storage in CIFTI format + Converts the Parcel to a MatrixIndicesMap for storage in CIFTI format Parameters ---------- @@ -776,7 +776,7 @@ def __len__(self): def __eq__(self, other): if (self.__class__ != other.__class__ or len(self) != len(other) or - (self.name != other.name).all() or self.nvertices != other.nvertices or + not np.array_equal(self.name, other.name) or self.nvertices != other.nvertices or any((vox1 != vox2).any() for vox1, vox2 in zip(self.voxels, other.voxels))): return False if self.affine is not None: @@ -815,14 +815,14 @@ def __add__(self, other): affine, shape = other.affine, other.volume_shape else: affine, shape = self.affine, self.volume_shape - if other.affine is not None and ((other.affine != affine).all() or + if other.affine is not None and (not np.allclose(other.affine, affine) or other.volume_shape != shape): raise ValueError("Trying to concatenate two Parcels defined " + "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two Parcels with inconsistent " + + raise ValueError("Trying to concatenate two Parcels with inconsistent " "number of vertices for %s" % name) nvertices[name] = value @@ -850,7 +850,7 @@ def __getitem__(self, item): return self.voxels[idx[0]], self.vertices[idx[0]] if isinstance(item, integer_types): return self.get_element(item) - return type(self)(self.name[item], self.voxels[item], self.vertices[item], + return self.__class__(self.name[item], self.voxels[item], self.vertices[item], self.affine, self.volume_shape, self.nvertices) def get_element(self, index): @@ -1334,7 +1334,7 @@ def 
get_element(self, index):
         """
         if index < 0:
             index = self.size + index
-        if index >= self.size:
+        if index >= self.size or index < 0:
             raise IndexError("index %i is out of range for Series with size %i" %
                              (index, self.size))
         return self.start + self.step * index

From d1bc7fea3b5fc2f7f957ff132c26a60ebece52c7 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Fri, 22 Mar 2019 10:57:46 +0000
Subject: [PATCH 040/689] BF: only set idx_start to size - 1 for negative step

---
 nibabel/cifti2/cifti2_axes.py     | 2 +-
 nibabel/cifti2/tests/test_axes.py | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 18122904bd..a8ba5bce92 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -1305,7 +1305,7 @@ def __getitem__(self, item):
             idx_end = ((-1 if step < 0 else self.size) if item.stop is None else
                        (item.stop if item.stop >= 0 else self.size + item.stop))
-            if idx_start > self.size:
+            if idx_start > self.size and step < 0:
                 idx_start = self.size - 1
             if idx_end > self.size:
                 idx_end = self.size

diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py
index c38b5401f5..71b068779e 100644
--- a/nibabel/cifti2/tests/test_axes.py
+++ b/nibabel/cifti2/tests/test_axes.py
@@ -395,7 +395,9 @@ def test_series():
     assert (sr[0][1:-1:2].time == sr[0].time[1:-1:2]).all()
     assert (sr[0][::2].time == sr[0].time[::2]).all()
     assert (sr[0][:10:2].time == sr[0].time[::2]).all()
-    assert (sr[0][10::-1].time == sr[0].time[::-1]).all()
+    assert (sr[0][10:].time == sr[0].time[10:]).all()
+    assert (sr[0][10:12].time == sr[0].time[10:12]).all()
+    assert (sr[0][10::-1].time == sr[0].time[10::-1]).all()
     assert (sr[0][3:1:-1].time == sr[0].time[3:1:-1]).all()
     assert (sr[0][1:3:-1].time == sr[0].time[1:3:-1]).all()

From 949da0ef3ef73e02a0efe87d0c966c1dcc875a45 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Fri, 22 Mar 2019 11:28:44 +0000
Subject: [PATCH 041/689] TEST: add tests for Axis __eq__ methods

Tests that changing any individual part of the Axis leads to inequality.
Some bugs found when adding tests --- nibabel/cifti2/cifti2_axes.py | 8 +- nibabel/cifti2/tests/test_axes.py | 148 ++++++++++++++++++++++++++++++ 2 files changed, 152 insertions(+), 4 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index a8ba5bce92..ce7c1062a8 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -777,12 +777,12 @@ def __len__(self): def __eq__(self, other): if (self.__class__ != other.__class__ or len(self) != len(other) or not np.array_equal(self.name, other.name) or self.nvertices != other.nvertices or - any((vox1 != vox2).any() for vox1, vox2 in zip(self.voxels, other.voxels))): + any(not np.array_equal(vox1, vox2) for vox1, vox2 in zip(self.voxels, other.voxels))): return False if self.affine is not None: if ( other.affine is None or - abs(self.affine - other.affine).max() > 1e-8 or + not np.allclose(self.affine, other.affine) or self.volume_shape != other.volume_shape ): return False @@ -792,7 +792,7 @@ def __eq__(self, other): if len(vert1) != len(vert2): return False for name in vert1.keys(): - if name not in vert2 or (vert1[name] != vert2[name]).all(): + if name not in vert2 or not np.array_equal(vert1[name], vert2[name]): return False return True @@ -1021,7 +1021,7 @@ def __init__(self, name, label, meta=None): """ self.name = np.asanyarray(name, dtype='U') if isinstance(label, dict): - label = [label] * self.name.size + label = [label.copy() for _ in range(self.name.size)] self.label = np.asanyarray(label, dtype='object') if meta is None: meta = [{} for _ in range(self.name.size)] diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 71b068779e..452957128e 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -2,6 +2,7 @@ from nose.tools import assert_raises from .test_cifti2io_axes import check_rewrite import nibabel.cifti2.cifti2_axes as axes +from copy import deepcopy rand_affine = np.random.randn(4, 4) @@ -247,6 +248,55 @@ def test_brain_models(): with assert_raises(ValueError): axes.Parcels.from_brain_models([('a', bm_vertex), ('b', bm_other_number)]) + # test equalities + bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(2, 3, 4)) + bm_other = deepcopy(bm_vox) + assert bm_vox == bm_other + bm_other.voxel[1, 0] = 0 + assert bm_vox != bm_other + + bm_other = deepcopy(bm_vox) + bm_other.vertex[1] = 10 + assert bm_vox == bm_other, 'vertices are ignored in volumetric BrainModel' + + bm_other = deepcopy(bm_vox) + bm_other.name[1] = 'BRAIN_STRUCTURE_OTHER' + assert bm_vox != bm_other + + bm_other = deepcopy(bm_vox) + bm_other.affine[0, 0] = 10 + assert bm_vox != bm_other + + bm_other = deepcopy(bm_vox) + bm_other.volume_shape = (10, 3, 4) + assert bm_vox != bm_other + + bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + bm_other = deepcopy(bm_vertex) + assert bm_vertex == bm_other + bm_other.voxel[1, 0] = 0 + assert bm_vertex == bm_other, 'voxels are ignored in surface BrainModel' + + bm_other = deepcopy(bm_vertex) + bm_other.vertex[1] = 10 + assert bm_vertex != bm_other + + bm_other = deepcopy(bm_vertex) + bm_other.name[1] = 'BRAIN_STRUCTURE_CORTEX_RIGHT' + assert bm_vertex != bm_other + + bm_other = deepcopy(bm_vertex) + bm_other.nvertices['BRAIN_STRUCTURE_CORTEX_LEFT'] = 50 + assert bm_vertex != bm_other + + bm_other = deepcopy(bm_vertex) + bm_other.nvertices['BRAIN_STRUCTURE_CORTEX_RIGHT'] = 20 + assert 
bm_vertex != bm_other + + assert bm_vox != get_parcels() + assert bm_vertex != get_parcels() + def test_parcels(): """ @@ -319,6 +369,45 @@ def test_parcels(): with assert_raises(ValueError): prc + other_prc + # test parcel equalities + prc = get_parcels() + assert prc != get_scalar() + + prc_other = deepcopy(prc) + assert prc == prc_other + assert prc != prc_other[:2] + assert prc == prc_other[:] + prc_other.affine[0, 0] = 10 + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.volume_shape = (10, 3, 4) + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.nvertices['CIFTI_STRUCTURE_CORTEX_LEFT'] = 80 + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.voxels[0] = np.ones((2, 3), dtype='i4') + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.voxels[0] = prc_other.voxels * 2 + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.vertices[0]['CIFTI_STRUCTURE_CORTEX_LEFT'] = np.ones((8, ), dtype='i4') + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.vertices[0]['CIFTI_STRUCTURE_CORTEX_LEFT'] *= 2 + assert prc != prc_other + + prc_other = deepcopy(prc) + prc_other.name[0] = 'new_name' + assert prc != prc_other + def test_scalar(): """ @@ -337,6 +426,25 @@ def test_scalar(): assert sc2[:3] == sc assert sc2[3:] == sc + sc.meta[1]['a'] = 3 + assert 'a' not in sc.meta + + # test equalities + assert sc != get_parcels() + + sc_other = deepcopy(sc) + assert sc == sc_other + assert sc != sc_other[:2] + assert sc == sc_other[:] + sc_other.name[0] = 'new_name' + assert sc != sc_other + + sc_other = deepcopy(sc) + sc_other.meta[0]['new_key'] = 'new_entry' + assert sc != sc_other + sc.meta[0]['new_key'] = 'new_entry' + assert sc == sc_other + def test_label(): """ @@ -357,6 +465,30 @@ def test_label(): assert lab2[:3] == lab assert lab2[3:] == lab + # test equalities + lab = get_label() + assert lab != get_scalar() + + other_lab = deepcopy(lab) + assert lab != other_lab[:2] + assert lab == other_lab[:] + other_lab.name[0] = 'new_name' + assert lab != other_lab + + other_lab = deepcopy(lab) + other_lab.meta[0]['new_key'] = 'new_item' + assert 'new_key' not in other_lab.meta[1] + assert lab != other_lab + lab.meta[0]['new_key'] = 'new_item' + assert lab == other_lab + + other_lab = deepcopy(lab) + other_lab.label[0][20] = ('new_label', (0, 0, 0, 1)) + assert lab != other_lab + assert 20 not in other_lab.label[1] + lab.label[0][20] = ('new_label', (0, 0, 0, 1)) + assert lab == other_lab + def test_series(): """ @@ -401,6 +533,22 @@ def test_series(): assert (sr[0][3:1:-1].time == sr[0].time[3:1:-1]).all() assert (sr[0][1:3:-1].time == sr[0].time[1:3:-1]).all() + # test_equalities + sr = next(get_series()) + assert sr != sr[:2] + assert sr == sr[:] + + for key, value in ( + ('start', 20), + ('step', 7), + ('size', 14), + ('unit', 'HERTZ'), + ): + sr_other = deepcopy(sr) + assert sr == sr_other + setattr(sr_other, key, value) + assert sr != sr_other + def test_writing(): """ From ac0258c60f8125a38dc418fc7e601072f40cf00b Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 11:31:08 +0000 Subject: [PATCH 042/689] DOC: removed Series attribute list as it is already listed in the __init__ --- nibabel/cifti2/cifti2_axes.py | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index ce7c1062a8..2411f9ac0f 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1149,15 +1149,6 @@ class 
Series(Axis): Along this axis of the CIFTI vector/matrix the rows/columns increase monotonously in time This Axis describes the time point of each row/column. - - Attributes - ---------- - start : float - starting time point - step : float - sampling time (TR) - size : int - number of time points """ size = None @@ -1168,11 +1159,11 @@ def __init__(self, start, step, size, unit="SECOND"): Parameters ---------- start : float - Time of the first datapoint - step : float - Step size between data points + starting time point + step : float + sampling time (TR) size : int - Number of data points + number of time points unit : str Unit of the step size (one of 'second', 'hertz', 'meter', or 'radian') """ From 6726b00d6c7cfac050b8ece4170216742e9bc374 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 11:33:42 +0000 Subject: [PATCH 043/689] RF: removed separate `extend` method Functionality is still available by adding two Series together --- nibabel/cifti2/cifti2_axes.py | 27 +++++---------------------- 1 file changed, 5 insertions(+), 22 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 2411f9ac0f..80220f459e 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1227,27 +1227,6 @@ def unit(self, value): "('second', 'hertz', 'meter', or 'radian'") self._unit = value.upper() - def extend(self, other_axis): - """ - Concatenates two Series - - Note: this will ignore the start point of the other axis - - Parameters - ---------- - other_axis : Series - other axis - - Returns - ------- - Series - """ - if other_axis.step != self.step: - raise ValueError('Can only concatenate Series with the same step size') - if other_axis.unit != self.unit: - raise ValueError('Can only concatenate Series with the same unit') - return Series(self.start, self.step, self.size + other_axis.size, self.unit) - def __len__(self): return self.size @@ -1284,7 +1263,11 @@ def __add__(self, other): raised if the repetition time of the two time Series is different """ if isinstance(other, Series): - return self.extend(other) + if other.step != self.step: + raise ValueError('Can only concatenate Series with the same step size') + if other.unit != self.unit: + raise ValueError('Can only concatenate Series with the same unit') + return Series(self.start, self.step, self.size + other.size, self.unit) return NotImplemented def __getitem__(self, item): From 790aba4e378461ee8786f3dd4f41415544bc8749 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 11:34:26 +0000 Subject: [PATCH 044/689] BF: report original index --- nibabel/cifti2/cifti2_axes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 80220f459e..899b8104cf 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1306,9 +1306,10 @@ def get_element(self, index): ------- float """ + original_index = index if index < 0: index = self.size + index if index >= self.size or index < 0: raise IndexError("index %i is out of range for Series with size %i" % - (index, self.size)) + (original_index, self.size)) return self.start + self.step * index From e7302d669d56f6daf45316d6c211a078035a5cdb Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 11:36:02 +0000 Subject: [PATCH 045/689] RF: removed spurious '+' when concatenating literal strings --- nibabel/cifti2/cifti2_axes.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff 
--git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 899b8104cf..dd5fa7b6b4 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -180,7 +180,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, self.volume_shape = None else: if affine is None or volume_shape is None: - raise ValueError("Affine and volume shape should be defined " + + raise ValueError("Affine and volume shape should be defined " "for BrainModel containing voxels") self.affine = affine self.volume_shape = volume_shape @@ -508,13 +508,13 @@ def __add__(self, other): not np.allclose(other.affine, affine) or other.volume_shape != shape ): - raise ValueError("Trying to concatenate two BrainModels defined " + + raise ValueError("Trying to concatenate two BrainModels defined " "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two BrainModels with inconsistent " + + raise ValueError("Trying to concatenate two BrainModels with inconsistent " "number of vertices for %s" % name) nvertices[name] = value return self.__class__( @@ -651,7 +651,7 @@ def from_brain_models(cls, named_brain_models): volume_shape = bm.volume_shape else: if not np.allclose(affine, bm.affine) or (volume_shape != bm.volume_shape): - raise ValueError("Can not combine brain models defined in different " + + raise ValueError("Can not combine brain models defined in different " "volumes into a single Parcel axis") all_voxels[idx_parcel] = voxels @@ -659,7 +659,7 @@ def from_brain_models(cls, named_brain_models): for name, _, bm_part in bm.iter_structures(): if name in bm.nvertices.keys(): if name in nvertices.keys() and nvertices[name] != bm.nvertices[name]: - raise ValueError("Got multiple conflicting number of " + + raise ValueError("Got multiple conflicting number of " "vertices for surface structure %s" % name) nvertices[name] = bm.nvertices[name] vertices[name] = bm_part.vertex @@ -817,7 +817,7 @@ def __add__(self, other): affine, shape = self.affine, self.volume_shape if other.affine is not None and (not np.allclose(other.affine, affine) or other.volume_shape != shape): - raise ValueError("Trying to concatenate two Parcels defined " + + raise ValueError("Trying to concatenate two Parcels defined " "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): @@ -1290,7 +1290,7 @@ def __getitem__(self, item): nelements, self.unit) elif isinstance(item, integer_types): return self.get_element(item) - raise IndexError('Series can only be indexed with integers or slices ' + + raise IndexError('Series can only be indexed with integers or slices ' 'without breaking the regular structure') def get_element(self, index): From b2c674f7502c7f3abc3a2e06e5b50926bbc3cff6 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 12:11:43 +0000 Subject: [PATCH 046/689] Increased test coverage --- nibabel/cifti2/cifti2_axes.py | 14 +++++-- nibabel/cifti2/tests/test_axes.py | 67 ++++++++++++++++++++++++++++++- 2 files changed, 76 insertions(+), 5 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index dd5fa7b6b4..2ae19f1bbd 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -605,6 +605,13 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert maps names of surface elements to integers (not needed for 
volumetric CIFTI files) """ self.name = np.asanyarray(name, dtype='U') + as_array = np.asanyarray(voxels) + if as_array.ndim == 1: + voxels = as_array.astype('object') + else: + voxels = np.empty(len(voxels), dtype='object') + for idx in range(len(voxels)): + voxels[idx] = as_array[idx] self.voxels = np.asanyarray(voxels, dtype='object') self.vertices = np.asanyarray(vertices, dtype='object') self.affine = affine @@ -649,10 +656,9 @@ def from_brain_models(cls, named_brain_models): if affine is None: affine = bm.affine volume_shape = bm.volume_shape - else: - if not np.allclose(affine, bm.affine) or (volume_shape != bm.volume_shape): - raise ValueError("Can not combine brain models defined in different " - "volumes into a single Parcel axis") + elif not np.allclose(affine, bm.affine) or (volume_shape != bm.volume_shape): + raise ValueError("Can not combine brain models defined in different " + "volumes into a single Parcel axis") all_voxels[idx_parcel] = voxels vertices = {} diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 452957128e..60454d2397 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -268,6 +268,11 @@ def test_brain_models(): bm_other.affine[0, 0] = 10 assert bm_vox != bm_other + bm_other = deepcopy(bm_vox) + bm_other.affine = None + assert bm_vox != bm_other + assert bm_other != bm_vox + bm_other = deepcopy(bm_vox) bm_other.volume_shape = (10, 3, 4) assert bm_vox != bm_other @@ -380,13 +385,24 @@ def test_parcels(): prc_other.affine[0, 0] = 10 assert prc != prc_other + prc_other = deepcopy(prc) + prc_other.affine = None + assert prc != prc_other + assert prc_other != prc + assert (prc + prc_other).affine is not None + assert (prc_other + prc).affine is not None + prc_other = deepcopy(prc) prc_other.volume_shape = (10, 3, 4) assert prc != prc_other + with assert_raises(ValueError): + prc + prc_other prc_other = deepcopy(prc) prc_other.nvertices['CIFTI_STRUCTURE_CORTEX_LEFT'] = 80 assert prc != prc_other + with assert_raises(ValueError): + prc + prc_other prc_other = deepcopy(prc) prc_other.voxels[0] = np.ones((2, 3), dtype='i4') @@ -408,6 +424,24 @@ def test_parcels(): prc_other.name[0] = 'new_name' assert prc != prc_other + # test direct initialisation + axes.Parcels( + voxels=[np.ones((3, 2), dtype=int)], + vertices=[{}], + name=['single_voxel'], + affine=np.eye(4), + volume_shape=(2, 3, 4), + ) + + with assert_raises(ValueError): + axes.Parcels( + voxels=[np.ones((3, 2), dtype=int)], + vertices=[{}], + name=[['single_voxel']], # wrong shape name array + affine=np.eye(4), + volume_shape=(2, 3, 4), + ) + def test_scalar(): """ @@ -430,7 +464,9 @@ def test_scalar(): assert 'a' not in sc.meta # test equalities - assert sc != get_parcels() + assert sc != get_label() + with assert_raises(Exception): + sc + get_label() sc_other = deepcopy(sc) assert sc == sc_other @@ -445,6 +481,15 @@ def test_scalar(): sc.meta[0]['new_key'] = 'new_entry' assert sc == sc_other + # test constructor + assert axes.Scalar(['scalar_name'], [{}]) == axes.Scalar(['scalar_name']) + + with assert_raises(ValueError): + axes.Scalar([['scalar_name']]) # wrong shape + + with assert_raises(ValueError): + axes.Scalar(['scalar_name'], [{}, {}]) # wrong size + def test_label(): """ @@ -468,6 +513,8 @@ def test_label(): # test equalities lab = get_label() assert lab != get_scalar() + with assert_raises(Exception): + lab + get_scalar() other_lab = deepcopy(lab) assert lab != other_lab[:2] @@ -489,6 +536,15 @@ def test_label(): 
lab.label[0][20] = ('new_label', (0, 0, 0, 1)) assert lab == other_lab + # test constructor + assert axes.Label(['scalar_name'], [{}], [{}]) == axes.Label(['scalar_name'], [{}]) + + with assert_raises(ValueError): + axes.Label([['scalar_name']], [{}]) # wrong shape + + with assert_raises(ValueError): + axes.Label(['scalar_name'], [{}, {}]) # wrong size + def test_series(): """ @@ -533,8 +589,17 @@ def test_series(): assert (sr[0][3:1:-1].time == sr[0].time[3:1:-1]).all() assert (sr[0][1:3:-1].time == sr[0].time[1:3:-1]).all() + with assert_raises(IndexError): + assert sr[0][[0, 1]] + with assert_raises(IndexError): + assert sr[0][20] + with assert_raises(IndexError): + assert sr[0][-20] + # test_equalities sr = next(get_series()) + with assert_raises(Exception): + sr + get_scalar() assert sr != sr[:2] assert sr == sr[:] From 459fa8855781860817f8a371e016c9756401d9c2 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 12:15:17 +0000 Subject: [PATCH 047/689] RF: replaced last '.all()' with `array_equal` --- nibabel/cifti2/cifti2_axes.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 2ae19f1bbd..5753ea9c00 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -959,7 +959,7 @@ def __eq__(self, other): """ if not isinstance(other, Scalar) or self.size != other.size: return False - return (self.name == other.name).all() and (self.meta == other.meta).all() + return np.array_equal(self.name, other.name) and np.array_equal(self.meta, other.meta) def __add__(self, other): """ @@ -1100,9 +1100,9 @@ def __eq__(self, other): if not isinstance(other, Label) or self.size != other.size: return False return ( - (self.name == other.name).all() and - (self.meta == other.meta).all() and - (self.label == other.label).all() + np.array_equal(self.name, other.name) and + np.array_equal(self.meta, other.meta) and + np.array_equal(self.label, other.label) ) def __add__(self, other): From 6b55e1128e95ab07c42184d16d283a8932d006c6 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 13:33:18 +0000 Subject: [PATCH 048/689] RF: made flake8 happy again --- nibabel/cifti2/cifti2_axes.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 5753ea9c00..22ed322493 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -736,8 +736,8 @@ def to_mapping(self, dim): for name, voxels, vertices in zip(self.name, self.voxels, self.vertices): cifti_voxels = cifti2.Cifti2VoxelIndicesIJK(voxels) element = cifti2.Cifti2Parcel(name, cifti_voxels) - for name, idx_vertices in vertices.items(): - element.vertices.append(cifti2.Cifti2Vertices(name, idx_vertices)) + for name_vertex, idx_vertices in vertices.items(): + element.vertices.append(cifti2.Cifti2Vertices(name_vertex, idx_vertices)) mim.append(element) return mim @@ -783,7 +783,8 @@ def __len__(self): def __eq__(self, other): if (self.__class__ != other.__class__ or len(self) != len(other) or not np.array_equal(self.name, other.name) or self.nvertices != other.nvertices or - any(not np.array_equal(vox1, vox2) for vox1, vox2 in zip(self.voxels, other.voxels))): + any(not np.array_equal(vox1, vox2) + for vox1, vox2 in zip(self.voxels, other.voxels))): return False if self.affine is not None: if ( @@ -857,7 +858,7 @@ def __getitem__(self, item): if isinstance(item, integer_types): return self.get_element(item) return 
self.__class__(self.name[item], self.voxels[item], self.vertices[item], - self.affine, self.volume_shape, self.nvertices) + self.affine, self.volume_shape, self.nvertices) def get_element(self, index): """ From c24d1b18ad02191bd5eee5c8e3b43f2cd5e71a9b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 22 Mar 2019 10:09:18 -0400 Subject: [PATCH 049/689] CI: Maybe no update to pywin32 is needed? --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index ad37d1e23f..796c74a7da 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -21,7 +21,7 @@ install: # the parent CMD process). - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% - - python -m pip install --upgrade pip virtualenv pypiwin32 + - python -m pip install --upgrade pip virtualenv - virtualenv --python=python venv - venv\Scripts\activate - python --version From e4bc7b01c96e14c9faa23e76613d6d6adde33800 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 22 Mar 2019 18:05:42 +0000 Subject: [PATCH 050/689] BF: don't use np.full to create string array numpy 1.8 in python 2.7 does not like that --- nibabel/cifti2/tests/test_axes.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 60454d2397..2ca0f7cd18 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -171,10 +171,9 @@ def test_brain_models(): # Test the constructor bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) - print(bm_vox.name) - assert np.all(bm_vox.name == np.full(5, 'CIFTI_STRUCTURE_THALAMUS_LEFT')) - assert np.all(bm_vox.vertex == np.full(5, -1)) - assert np.all(bm_vox.voxel == np.full((5, 3), 1)) + assert np.all(bm_vox.name == ['CIFTI_STRUCTURE_THALAMUS_LEFT'] * 5) + assert np.array_equal(bm_vox.vertex, np.full(5, -1)) + assert np.array_equal(bm_vox.voxel, np.full((5, 3), 1)) with assert_raises(ValueError): # no volume shape axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4)) @@ -195,9 +194,9 @@ def test_brain_models(): axes.BrainModel('thalamus_left', voxel=np.ones((5, 2), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) - assert np.all(bm_vertex.name == np.full(5, 'CIFTI_STRUCTURE_CORTEX_LEFT')) - assert np.all(bm_vertex.vertex == np.full(5, 1)) - assert np.all(bm_vertex.voxel == np.full((5, 3), -1)) + assert np.array_equal(bm_vertex.name, ['CIFTI_STRUCTURE_CORTEX_LEFT'] * 5) + assert np.array_equal(bm_vertex.vertex, np.full(5, 1)) + assert np.array_equal(bm_vertex.voxel, np.full((5, 3), -1)) with assert_raises(ValueError): axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int)) with assert_raises(ValueError): From a5f88c2ae6a093d1aeb817ba4a9c1155daa75f37 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Sun, 24 Mar 2019 13:24:33 +0000 Subject: [PATCH 051/689] RF: set surface BrainModel default structure to Other --- nibabel/cifti2/cifti2_axes.py | 2 +- nibabel/cifti2/tests/test_axes.py | 23 ++++++++++++----------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 22ed322493..5c5e8290e9 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -232,7 +232,7 @@ def from_mask(cls, mask, name='other', affine=None): "3-dimensional (for volumes), 
not %i-dimensional" % mask.ndim) @classmethod - def from_surface(cls, vertices, nvertex, name='Cortex'): + def from_surface(cls, vertices, nvertex, name='Other'): """ Creates a new BrainModel axis describing the vertices on a surface diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 2ca0f7cd18..c248c0c0f4 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -125,33 +125,34 @@ def test_brain_models(): assert (bml[4].voxel == -1).all() assert (bml[4].vertex == [2, 9, 14]).all() - for bm, label in zip(bml, ['ThalamusRight', 'Other', 'cortex_left', 'cortex']): + for bm, label, is_surface in zip(bml, ['ThalamusRight', 'Other', 'cortex_left', 'Other'], + (False, False, True, True)): structures = list(bm.iter_structures()) assert len(structures) == 1 name = structures[0][0] assert name == axes.BrainModel.to_cifti_brain_structure_name(label) - if 'CORTEX' in name: + if is_surface: assert bm.nvertices[name] == 15 else: assert name not in bm.nvertices assert (bm.affine == rand_affine).all() assert bm.volume_shape == vol_shape - bmt = bml[0] + bml[1] + bml[2] + bml[3] - assert len(bmt) == 14 + bmt = bml[0] + bml[1] + bml[2] + assert len(bmt) == 10 structures = list(bmt.iter_structures()) - assert len(structures) == 4 - for bm, (name, _, bm_split) in zip(bml, structures): + assert len(structures) == 3 + for bm, (name, _, bm_split) in zip(bml[:3], structures): assert bm == bm_split assert (bm_split.name == name).all() assert bm == bmt[bmt.name == bm.name[0]] assert bm == bmt[np.where(bmt.name == bm.name[0])] - bmt = bmt + bml[3] - assert len(bmt) == 18 + bmt = bmt + bml[2] + assert len(bmt) == 13 structures = list(bmt.iter_structures()) - assert len(structures) == 4 - assert len(structures[-1][2]) == 8 + assert len(structures) == 3 + assert len(structures[-1][2]) == 6 # break brain model bmt.affine = np.eye(4) @@ -320,7 +321,7 @@ def test_parcels(): assert prc[2] == ('surface', ) + prc['surface'] assert prc['surface'][0].shape == (0, 3) assert len(prc['surface'][1]) == 1 - assert prc['surface'][1]['CIFTI_STRUCTURE_CORTEX'].shape == (4, ) + assert prc['surface'][1]['CIFTI_STRUCTURE_OTHER'].shape == (4, ) prc2 = prc + prc assert len(prc2) == 6 From a31af61c9b189bd40f5f0f0c6ebcf51287d62ca6 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Sun, 24 Mar 2019 13:32:22 +0000 Subject: [PATCH 052/689] RF: adjusted some type desciptions --- nibabel/cifti2/cifti2_axes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 5c5e8290e9..2dae100998 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -31,7 +31,7 @@ def to_header(axes): Parameters ---------- - axes : iterable[Axis] + axes : iterable of :py:class:`Axis` objects one or more axes describing each dimension in turn Returns @@ -140,10 +140,10 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, affine : np.ndarray (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) - volume_shape : Tuple[int, int, int] + volume_shape : tuple of three integers shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) - nvertices : dict[String -> int] + nvertices : dict from string to integer maps names of surface elements to integers (not needed for volumetric CIFTI files) """ if voxel is None: @@ -361,7 +361,7 @@ def to_cifti_brain_structure_name(name): 
Parameters ---------- - name: (str, tuple) + name: iterable of 2-element tuples of integer and string input name of an anatomical region Returns @@ -598,7 +598,7 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert affine : np.ndarray (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only covering the surface) - volume_shape : Tuple[int, int, int] + volume_shape : tuple of three integers shape of the volume in which the voxels were defined (not needed for CIFTI files only covering the surface) nvertices : dict[String -> int] @@ -634,7 +634,7 @@ def from_brain_models(cls, named_brain_models): Parameters ---------- - named_brain_models : List[Tuple[String, BrainModel]] + named_brain_models : iterable of 2-element tuples of string and BrainModel list of (parcel name, brain model representation) pairs defining each parcel Returns @@ -889,9 +889,9 @@ def __init__(self, name, meta=None): """ Parameters ---------- - name : np.ndarray + name : np.ndarray of string (N, ) string array with the parcel names - meta : np.ndarray + meta : np.ndarray of dict (N, ) object array with a dictionary of metadata for each row/column. Defaults to empty dictionary """ From 0e0b7f212c9bf9eafb97ef627312d447ddcc3e6b Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Sun, 24 Mar 2019 14:39:32 +0000 Subject: [PATCH 053/689] DOC: Added tutorial to docs adopted from https://github.com/MichielCottaar/cifti/blob/master/README.md Added CIfTI to list of supported file formats replaced CIFTI with CIfTI2 --- doc/source/api.rst | 1 + nibabel/cifti2/__init__.py | 3 +- nibabel/cifti2/cifti2.py | 7 +- nibabel/cifti2/cifti2_axes.py | 180 +++++++++++++++++++++++++++------- 4 files changed, 148 insertions(+), 43 deletions(-) diff --git a/doc/source/api.rst b/doc/source/api.rst index 1ae1bb416c..0f3cf1de26 100644 --- a/doc/source/api.rst +++ b/doc/source/api.rst @@ -23,6 +23,7 @@ File Formats analyze spm2analyze spm99analyze + cifti2 gifti freesurfer minc1 diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index 0c80e4033b..071f76bf58 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""CIfTI format IO +"""CIfTI2 format IO .. currentmodule:: nibabel.cifti2 @@ -14,6 +14,7 @@ :toctree: ../generated cifti2 + cifti2_axes """ from .parse_cifti2 import Cifti2Extension diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 30bcbda73e..06deb72fe4 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -12,12 +12,9 @@ http://www.nitrc.org/forum/message.php?msg_id=3738 -Definition of the CIFTI2 header format and file extensions attached to this -email: +Definition of the CIFTI2 header format and file extensions can be found at: - http://www.nitrc.org/forum/forum.php?thread_id=4380&forum_id=1955 - -Filename is ``CIFTI-2_Main_FINAL_1March2014.pdf``. 
+    http://www.nitrc.org/projects/cifti
 '''
 from __future__ import division, print_function, absolute_import
 import re
diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 2dae100998..386fe3699f 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -1,3 +1,109 @@
+"""
+Defines :class:`Axis` objects to create, read, and manipulate CIfTI2 files
+
+Each type of CIfTI2 axes describing the rows/columns in a CIfTI2 matrix is given a unique class:
+
+* :class:`BrainModel`: each row/column is a voxel or vertex
+* :class:`Parcels`: each row/column is a group of voxels and/or vertices
+* :class:`Scalar`: each row/column has a unique name (with optional meta-data)
+* :class:`Label`: each row/column has a unique name and label table (with optional meta-data)
+* :class:`Series`: each row/column is a timepoint, which increases monotonically
+
+All of these classes are derived from the :class:`Axis` class.
+
+After loading a CIfTI2 file a tuple of axes describing the rows and columns can be obtained
+from the :meth:`.cifti2.Cifti2Header.get_axis` method on the header object
+(e.g. ``nibabel.load(<filename>).header.get_axis()``). Inversely, a new
+:class:`.cifti2.Cifti2Header` object can be created from existing :class:`Axis` objects
+using the :meth:`.cifti2.Cifti2Header.from_axes` factory method.
+
+CIfTI2 :class:`Axis` objects of the same type can be concatenated using the '+'-operator.
+Numpy indexing also works on axes
+(except for Series objects, which have to remain monotonically increasing or decreasing).
+
+Creating new CIfTI2 axes
+------------------------
+New :class:`Axis` objects can be constructed by providing a description for what is contained
+in each row/column of the described tensor. For each :class:`Axis` sub-class this descriptor is:
+
+* :class:`BrainModel`: a CIfTI2 structure name and a voxel or vertex index
+* :class:`Parcels`: a name and a sequence of voxel and vertex indices
+* :class:`Scalar`: a name and optionally a dict of meta-data
+* :class:`Label`: a name, dict of label index to name and colour,
+  and optionally a dict of meta-data
+* :class:`Series`: the time-point of each row/column is set by setting the start, step, size,
+  and unit of the time-series
+
+Several helper functions exist to create new :class:`BrainModel` axes:
+
+* :meth:`BrainModel.from_mask` creates a new BrainModel volume covering the
+  non-zero values of a mask
+* :meth:`BrainModel.from_surface` creates a new BrainModel surface covering the provided
+  indices of a surface
+
+A :class:`Parcels` axis can be created from a sequence of :class:`BrainModel` axes using
+:meth:`Parcels.from_brain_models`.
+
+Examples
+--------
+We can create brain models covering the left cortex and left thalamus using:
+
+>>> from nibabel import cifti2
+>>> bm_cortex = cifti2.BrainModel.from_mask(cortex_mask, name='cortex_left')
+>>> bm_thal = cifti2.BrainModel.from_mask(thalamus_mask, affine=affine,
+                                          name='thalamus_left')
+
+Brain structure names automatically get converted to valid CIfTI2 identifiers using
+:meth:`BrainModel.to_cifti_brain_structure_name`.
+A 1-dimensional mask will be automatically interpreted as a surface element and a 3-dimensional
+mask as a volume element.
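+
+For instance, the ``cortex_mask``, ``thalamus_mask`` and ``affine`` inputs assumed above
+could be built with plain numpy (a minimal sketch; the sizes and values are arbitrary):
+
+>>> import numpy as np
+>>> cortex_mask = np.zeros(20, dtype='bool')           # 1-dimensional, so a surface mask
+>>> cortex_mask[16:19] = True
+>>> thalamus_mask = np.zeros((4, 4, 4), dtype='bool')  # 3-dimensional, so a volume mask
+>>> thalamus_mask[2, 2, 2] = True
+>>> affine = np.eye(4)                                 # voxel-to-mm mapping, here identity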
+
+These can be concatenated in a single brain model covering the left cortex and thalamus by
+simply adding them together
+
+>>> bm_full = bm_cortex + bm_thal
+
+Brain models covering the full HCP grayordinate space can be constructed by adding all the
+volumetric and surface brain models together like this (or by reading one from an already
+existing HCP file).
+
+Getting a specific brain region from the full brain model is as simple as:
+
+>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_CORTEX_LEFT'] == bm_cortex
+>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_THALAMUS_LEFT'] == bm_thal
+
+You can also iterate over all brain structures in a brain model:
+
+>>> for name, slc, bm in bm_full.iter_structures(): ...
+
+In this case there will be two iterations, namely:
+('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, <size of bm_cortex>), bm_cortex)
+and
+('CIFTI_STRUCTURE_THALAMUS_LEFT', slice(<size of bm_cortex>, None), bm_thal)
+
+Parcels can be constructed from selections of these brain models:
+
+>>> parcel = cifti2.Parcels.from_brain_models([
+        ('surface_parcel', bm_cortex[:100]),  # contains first 100 cortical vertices
+        ('volume_parcel', bm_thal),  # contains thalamus
+        ('combined_parcel', bm_full[[1, 8, 10, 50, 120, 127]])  # contains selected voxels/vertices
+        ])
+
+Time series are represented by their starting time (typically 0), step size
+(i.e. sampling time or TR), and number of elements:
+
+>>> series = cifti2.Series(start=0, step=100, size=5000)
+
+So a header for fMRI data with a TR of 100 ms covering the left cortex and thalamus with
+5000 timepoints could be created with
+
+>>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal))
+
+Similarly the curvature and cortical thickness on the left cortex could be stored using a header
+like:
+
+>>> cifti2.Cifti2Header.from_axes((cifti2.Scalar(['curvature', 'thickness']), bm_cortex))
+"""
 import numpy as np
 from . import cifti2
 from six import string_types, add_metaclass, integer_types
@@ -7,7 +113,7 @@
 
 def from_mapping(mim):
     """
-    Parses the MatrixIndicesMap to find the appropriate CIFTI axis describing the rows or columns
+    Parses the MatrixIndicesMap to find the appropriate CIfTI2 axis describing the rows or columns
 
     Parameters
     ----------
@@ -27,7 +133,7 @@
 
 def to_header(axes):
     """
-    Converts the axes describing the rows/columns of a CIFTI vector/matrix to a Cifti2Header
+    Converts the axes describing the rows/columns of a CIfTI2 vector/matrix to a Cifti2Header
 
     Parameters
    ----------
@@ -56,7 +162,7 @@
 
 @add_metaclass(abc.ABCMeta)
 class Axis(object):
     """
-    Abstract class for any object describing the rows or columns of a CIFTI vector/matrix
+    Abstract class for any object describing the rows or columns of a CIfTI2 vector/matrix
 
     Mainly used for type checking.
     """
@@ -110,7 +216,7 @@ def __getitem__(self, item):
 
 class BrainModel(Axis):
     """
-    Each row/column in the CIFTI vector/matrix represents a single vertex or voxel
+    Each row/column in the CIfTI2 vector/matrix represents a single vertex or voxel
 
     This Axis describes which vertex/voxel is represented by each row/column.
""" @@ -133,18 +239,18 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, name : str or np.ndarray brain structure name or (N, ) string array with the brain structure names voxel : np.ndarray - (N, 3) array with the voxel indices (can be omitted for CIFTI files only + (N, 3) array with the voxel indices (can be omitted for CIfTI2 files only covering the surface) vertex : np.ndarray - (N, ) array with the vertex indices (can be omitted for volumetric CIFTI files) + (N, ) array with the vertex indices (can be omitted for volumetric CIfTI2 files) affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only + (4, 4) array mapping voxel indices to mm space (not needed for CIfTI2 files only covering the surface) volume_shape : tuple of three integers - shape of the volume in which the voxels were defined (not needed for CIFTI files only + shape of the volume in which the voxels were defined (not needed for CIfTI2 files only covering the surface) nvertices : dict from string to integer - maps names of surface elements to integers (not needed for volumetric CIFTI files) + maps names of surface elements to integers (not needed for volumetric CIfTI2 files) """ if voxel is None: if vertex is None: @@ -256,7 +362,7 @@ def from_surface(cls, vertices, nvertex, name='Other'): @classmethod def from_mapping(cls, mim): """ - Creates a new BrainModel axis based on a CIFTI dataset + Creates a new BrainModel axis based on a CIfTI2 dataset Parameters ---------- @@ -289,12 +395,12 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the brain model axis to a MatrixIndicesMap for storage in CIFTI format + Converts the brain model axis to a MatrixIndicesMap for storage in CIfTI2 format Parameters ---------- dim : int - which dimension of the CIFTI vector/matrix is described by this dataset (zero-based) + which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -329,7 +435,7 @@ def iter_structures(self): Yields ------ tuple with 3 elements: - - CIFTI brain structure name + - CIfTI2 brain structure name - slice to select the data associated with the brain structure from the tensor - brain model covering that specific brain structure """ @@ -345,11 +451,11 @@ def iter_structures(self): @staticmethod def to_cifti_brain_structure_name(name): """ - Attempts to convert the name of an anatomical region in a format recognized by CIFTI + Attempts to convert the name of an anatomical region in a format recognized by CIfTI2 This function returns: - - the name if it is in the CIFTI format already + - the name if it is in the CIfTI2 format already - if the name is a tuple the first element is assumed to be the structure name while the second is assumed to be the hemisphere (left, right or both). The latter will default to both. 
@@ -366,11 +472,11 @@ def to_cifti_brain_structure_name(name): Returns ------- - CIFTI2 compatible name + CIfTI2 compatible name Raises ------ - ValueError: raised if the input name does not match a known anatomical structure in CIFTI + ValueError: raised if the input name does not match a known anatomical structure in CIfTI2 """ if name in cifti2.CIFTI_BRAIN_STRUCTURES: return name @@ -571,7 +677,7 @@ def get_element(self, index): class Parcels(Axis): """ - Each row/column in the CIFTI vector/matrix represents a parcel of voxels/vertices + Each row/column in the CIfTI2 vector/matrix represents a parcel of voxels/vertices This Axis describes which parcel is represented by each row/column. @@ -596,13 +702,13 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert For each parcel the vertices are represented by a mapping from brain structure name to (M, ) index array affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIFTI files only + (4, 4) array mapping voxel indices to mm space (not needed for CIfTI2 files only covering the surface) volume_shape : tuple of three integers - shape of the volume in which the voxels were defined (not needed for CIFTI files only + shape of the volume in which the voxels were defined (not needed for CIfTI2 files only covering the surface) nvertices : dict[String -> int] - maps names of surface elements to integers (not needed for volumetric CIFTI files) + maps names of surface elements to integers (not needed for volumetric CIfTI2 files) """ self.name = np.asanyarray(name, dtype='U') as_array = np.asanyarray(voxels) @@ -675,7 +781,7 @@ def from_brain_models(cls, named_brain_models): @classmethod def from_mapping(cls, mim): """ - Creates a new Parcels axis based on a CIFTI dataset + Creates a new Parcels axis based on a CIfTI2 dataset Parameters ---------- @@ -716,12 +822,12 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the Parcel to a MatrixIndicesMap for storage in CIFTI format + Converts the Parcel to a MatrixIndicesMap for storage in CIfTI2 format Parameters ---------- dim : int - which dimension of the CIFTI vector/matrix is described by this dataset (zero-based) + which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -881,7 +987,7 @@ def get_element(self, index): class Scalar(Axis): """ - Along this axis of the CIFTI vector/matrix each row/column has been given + Along this axis of the CIfTI2 vector/matrix each row/column has been given a unique name and optionally metadata """ @@ -908,7 +1014,7 @@ def __init__(self, name, meta=None): @classmethod def from_mapping(cls, mim): """ - Creates a new Scalar axis based on a CIFTI dataset + Creates a new Scalar axis based on a CIfTI2 dataset Parameters ---------- @@ -924,12 +1030,12 @@ def from_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the hcp_labels to a MatrixIndicesMap for storage in CIFTI format + Converts the hcp_labels to a MatrixIndicesMap for storage in CIfTI2 format Parameters ---------- dim : int - which dimension of the CIFTI vector/matrix is described by this dataset (zero-based) + which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -1007,9 +1113,9 @@ def get_element(self, index): class Label(Axis): """ - Defines CIFTI axis for label array. + Defines CIfTI2 axis for label array. 
-    Along this axis of the CIFTI vector/matrix each row/column has been given a unique name,
+    Along this axis of the CIfTI2 vector/matrix each row/column has been given a unique name,
     label table, and optionally metadata
     """
@@ -1042,7 +1148,7 @@ def __init__(self, name, label, meta=None):
     @classmethod
     def from_mapping(cls, mim):
         """
-        Creates a new Label axis based on a CIFTI dataset
+        Creates a new Label axis based on a CIfTI2 dataset
 
         Parameters
         ----------
@@ -1059,12 +1165,12 @@ def from_mapping(cls, mim):
 
     def to_mapping(self, dim):
         """
-        Converts the hcp_labels to a MatrixIndicesMap for storage in CIFTI format
+        Converts the hcp_labels to a MatrixIndicesMap for storage in CIfTI2 format
 
         Parameters
         ----------
         dim : int
-            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+            which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based)
 
         Returns
         -------
@@ -1153,7 +1259,7 @@ class Series(Axis):
     """
-    Along this axis of the CIFTI vector/matrix the rows/columns increase monotonously in time
+    Along this axis of the CIfTI2 vector/matrix the rows/columns increase monotonically in time
 
     This Axis describes the time point of each row/column.
     """
@@ -1186,7 +1292,7 @@ def time(self):
 
     @classmethod
     def from_mapping(cls, mim):
         """
-        Creates a new Series axis based on a CIFTI dataset
+        Creates a new Series axis based on a CIfTI2 dataset
 
         Parameters
         ----------
@@ -1202,12 +1308,12 @@ def from_mapping(cls, mim):
 
     def to_mapping(self, dim):
         """
-        Converts the Series to a MatrixIndicesMap for storage in CIFTI format
+        Converts the Series to a MatrixIndicesMap for storage in CIfTI2 format
 
         Parameters
         ----------
         dim : int
-            which dimension of the CIFTI vector/matrix is described by this dataset (zero-based)
+            which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based)
 
         Returns
         -------

From 594b22ff11cc439a9245dcecc97c954b451b9e68 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Sun, 24 Mar 2019 17:29:27 +0000
Subject: [PATCH 054/689] BUG: skip doctests in example code

---
 nibabel/cifti2/cifti2_axes.py | 28 +++++++++++++++-------------
 1 file changed, 15 insertions(+), 13 deletions(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 386fe3699f..8f36721af7 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -49,9 +49,10 @@
 We can create brain models covering the left cortex and left thalamus using:
 
 >>> from nibabel import cifti2
->>> bm_cortex = cifti2.BrainModel.from_mask(cortex_mask, name='cortex_left')
+>>> bm_cortex = cifti2.BrainModel.from_mask(cortex_mask,
+...                                         name='cortex_left')  # doctest: +SKIP
 >>> bm_thal = cifti2.BrainModel.from_mask(thalamus_mask, affine=affine,
-                                          name='thalamus_left')
+...                                       name='thalamus_left')  # doctest: +SKIP
 
 Brain structure names automatically get converted to valid CIfTI2 identifiers using
 :meth:`BrainModel.to_cifti_brain_structure_name`.
 A 1-dimensional mask will be automatically interpreted as a surface element and a 3-dimensional
 mask as a volume element.
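Note that a ``# doctest: +SKIP`` directive applies only to the single example it is attached
to, which is why each statement in the tutorial gets its own marker. A generic illustration
(``some_mask`` is a hypothetical, undefined input):

>>> bm = cifti2.BrainModel.from_mask(some_mask)  # doctest: +SKIP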
@@ -61,7 +62,7 @@
 These can be concatenated in a single brain model covering the left cortex and thalamus by
 simply adding them together
 
->>> bm_full = bm_cortex + bm_thal
+>>> bm_full = bm_cortex + bm_thal  # doctest: +SKIP
 
 Brain models covering the full HCP grayordinate space can be constructed by adding all the
 volumetric and surface brain models together like this (or by reading one from an already
 existing HCP file).
@@ -69,12 +70,12 @@
 
 Getting a specific brain region from the full brain model is as simple as:
 
->>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_CORTEX_LEFT'] == bm_cortex
->>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_THALAMUS_LEFT'] == bm_thal
+>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_CORTEX_LEFT'] == bm_cortex  # doctest: +SKIP
+>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_THALAMUS_LEFT'] == bm_thal  # doctest: +SKIP
 
 You can also iterate over all brain structures in a brain model:
 
->>> for name, slc, bm in bm_full.iter_structures(): ...
+>>> for name, slc, bm in bm_full.iter_structures(): ...  # doctest: +SKIP
 
 In this case there will be two iterations, namely:
 ('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, <size of bm_cortex>), bm_cortex)
 and
 ('CIFTI_STRUCTURE_THALAMUS_LEFT', slice(<size of bm_cortex>, None), bm_thal)
@@ -84,25 +85,26 @@
 
 Parcels can be constructed from selections of these brain models:
 
 >>> parcel = cifti2.Parcels.from_brain_models([
-        ('surface_parcel', bm_cortex[:100]),  # contains first 100 cortical vertices
-        ('volume_parcel', bm_thal),  # contains thalamus
-        ('combined_parcel', bm_full[[1, 8, 10, 50, 120, 127]])  # contains selected voxels/vertices
-        ])
+...     ('surface_parcel', bm_cortex[:100]),  # contains first 100 cortical vertices
+...     ('volume_parcel', bm_thal),  # contains thalamus
+...     ('combined_parcel', bm_full[[1, 8, 10, 50, 120, 127]])  # contains selected voxels/vertices
+...     ])  # doctest: +SKIP
 
 Time series are represented by their starting time (typically 0), step size
 (i.e. sampling time or TR), and number of elements:
 
->>> series = cifti2.Series(start=0, step=100, size=5000)
+>>> series = cifti2.Series(start=0, step=100, size=5000)  # doctest: +SKIP
 
 So a header for fMRI data with a TR of 100 ms covering the left cortex and thalamus with
 5000 timepoints could be created with
 
->>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal))
+>>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal))  # doctest: +SKIP
 
 Similarly the curvature and cortical thickness on the left cortex could be stored using a header
 like:
 
->>> cifti2.Cifti2Header.from_axes((cifti2.Scalar(['curvature', 'thickness']), bm_cortex))
+>>> cifti2.Cifti2Header.from_axes((cifti2.Scalar(['curvature', 'thickness']),
+...                                bm_cortex))  # doctest: +SKIP
 """
 import numpy as np

From 29a52877120cd6d64fb5d5cd54207e037c8283e2 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Sun, 24 Mar 2019 17:30:21 +0000
Subject: [PATCH 055/689] BF: reduces line length in example code

---
 nibabel/cifti2/cifti2_axes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 8f36721af7..61fadb7ee7 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -87,7 +87,7 @@
 >>> parcel = cifti2.Parcels.from_brain_models([
 ...     ('surface_parcel', bm_cortex[:100]),  # contains first 100 cortical vertices
 ...     ('volume_parcel', bm_thal),  # contains thalamus
-...     ('combined_parcel', bm_full[[1, 8, 10, 50, 120, 127]])  # contains selected voxels/vertices
+...     ('combined_parcel', bm_full[[1, 8, 10, 120, 127]])  # contains selected voxels/vertices
]) # doctest: +SKIP Time series are represented by their starting time (typically 0), step size From 109fa9266ed65a2f2afcaf2d4d1c2c14e6554a2a Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 26 Mar 2019 17:12:12 +0000 Subject: [PATCH 056/689] RF: changes from @demianw not involving name changes --- nibabel/cifti2/cifti2_axes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 61fadb7ee7..00803f4cd1 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1,6 +1,7 @@ """ Defines :class:`Axis` objects to create, read, and manipulate CIfTI2 files +These axes provide an alternative interface to the information in the CIFTI2 header. Each type of CIfTI2 axes describing the rows/columns in a CIfTI2 matrix is given a unique class: * :class:`BrainModel`: each row/column is a voxel or vertex @@ -146,7 +147,7 @@ def to_header(axes): ------- cifti2.Cifti2Header """ - axes = list(axes) + axes = tuple(axes) mims_all = [] matrix = cifti2.Cifti2Matrix() for dim, ax in enumerate(axes): @@ -214,6 +215,7 @@ def __getitem__(self, item): """ Extracts definition of single row/column or new Axis describing a subset of the rows/columns """ + pass class BrainModel(Axis): From f7c47bb389356a88f173fe9700fe7b621dbb58c1 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 26 Mar 2019 17:14:54 +0000 Subject: [PATCH 057/689] RF: rename from_mapping to from_index_mapping --- nibabel/cifti2/cifti2.py | 2 +- nibabel/cifti2/cifti2_axes.py | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 06deb72fe4..ddbc0550cd 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1279,7 +1279,7 @@ def get_axis(self, index): axis : cifti2_axes.Axis ''' from . 
import cifti2_axes - return cifti2_axes.from_mapping(self.matrix.get_index_map(index)) + return cifti2_axes.from_index_mapping(self.matrix.get_index_map(index)) @classmethod def from_axes(cls, axes): diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 00803f4cd1..c475baa30c 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -114,7 +114,7 @@ import abc -def from_mapping(mim): +def from_index_mapping(mim): """ Parses the MatrixIndicesMap to find the appropriate CIfTI2 axis describing the rows or columns @@ -131,7 +131,7 @@ def from_mapping(mim): 'CIFTI_INDEX_TYPE_SERIES': Series, 'CIFTI_INDEX_TYPE_BRAIN_MODELS': BrainModel, 'CIFTI_INDEX_TYPE_PARCELS': Parcels} - return return_type[mim.indices_map_to_data_type].from_mapping(mim) + return return_type[mim.indices_map_to_data_type].from_index_mapping(mim) def to_header(axes): @@ -364,7 +364,7 @@ def from_surface(cls, vertices, nvertex, name='Other'): nvertices={cifti_name: nvertex}) @classmethod - def from_mapping(cls, mim): + def from_index_mapping(cls, mim): """ Creates a new BrainModel axis based on a CIfTI2 dataset @@ -783,7 +783,7 @@ def from_brain_models(cls, named_brain_models): return Parcels(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) @classmethod - def from_mapping(cls, mim): + def from_index_mapping(cls, mim): """ Creates a new Parcels axis based on a CIfTI2 dataset @@ -1016,7 +1016,7 @@ def __init__(self, name, meta=None): check_name, getattr(self, check_name).shape)) @classmethod - def from_mapping(cls, mim): + def from_index_mapping(cls, mim): """ Creates a new Scalar axis based on a CIfTI2 dataset @@ -1150,7 +1150,7 @@ def __init__(self, name, label, meta=None): check_name, getattr(self, check_name).shape)) @classmethod - def from_mapping(cls, mim): + def from_index_mapping(cls, mim): """ Creates a new Label axis based on a CIfTI2 dataset @@ -1164,7 +1164,7 @@ def from_mapping(cls, mim): """ tables = [{key: (value.label, value.rgba) for key, value in nm.label_table.items()} for nm in mim.named_maps] - rest = Scalar.from_mapping(mim) + rest = Scalar.from_index_mapping(mim) return Label(rest.name, tables, rest.meta) def to_mapping(self, dim): @@ -1294,7 +1294,7 @@ def time(self): return np.arange(self.size) * self.step + self.start @classmethod - def from_mapping(cls, mim): + def from_index_mapping(cls, mim): """ Creates a new Series axis based on a CIfTI2 dataset From b88d51602810d32df3d395c067461a87ad0c6bb5 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 26 Mar 2019 17:25:21 +0000 Subject: [PATCH 058/689] RF: return CIFTI_MODEL_TYPE to distinguish surface and voxels --- nibabel/cifti2/cifti2_axes.py | 10 ++++++---- nibabel/cifti2/tests/test_axes.py | 6 +++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index c475baa30c..1cbebb1e7c 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -670,13 +670,15 @@ def get_element(self, index): Returns ------- tuple with 3 elements - - boolean, which is True if it is a surface element + - str, 'CIFTI_MODEL_TYPE_SURFACE' for vertex or 'CIFTI_MODEL_TYPE_VOXELS' for voxel - vertex index if it is a surface element, otherwise array with 3 voxel indices - structure.BrainStructure object describing the brain structure the element was taken from """ - is_surface = self.name[index] in self.nvertices.keys() - struct = self.vertex if is_surface else self.voxel - return is_surface, struct[index], 
self.name[index] + element_type = 'CIFTI_MODEL_TYPE_' + ( + 'SURFACE' if self.name[index] in self.nvertices.keys() else 'VOXELS' + ) + struct = self.vertex if 'SURFACE' in element_type else self.voxel + return element_type, struct[index], self.name[index] class Parcels(Axis): diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index c248c0c0f4..b3ec45ebc4 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -107,7 +107,7 @@ def test_brain_models(): assert len(bml[0]) == 3 assert (bml[0].vertex == -1).all() assert (bml[0].voxel == [[0, 1, 2], [0, 4, 0], [0, 4, 2]]).all() - assert bml[0][1][0] == False + assert bml[0][1][0] == 'CIFTI_MODEL_TYPE_VOXELS' assert (bml[0][1][1] == [0, 4, 0]).all() assert bml[0][1][2] == axes.BrainModel.to_cifti_brain_structure_name('thalamus_right') assert len(bml[1]) == 4 @@ -116,11 +116,11 @@ def test_brain_models(): assert len(bml[2]) == 3 assert (bml[2].voxel == -1).all() assert (bml[2].vertex == [0, 5, 10]).all() - assert bml[2][1] == (True, 5, 'CIFTI_STRUCTURE_CORTEX_LEFT') + assert bml[2][1] == ('CIFTI_MODEL_TYPE_SURFACE', 5, 'CIFTI_STRUCTURE_CORTEX_LEFT') assert len(bml[3]) == 4 assert (bml[3].voxel == -1).all() assert (bml[3].vertex == [0, 5, 10, 13]).all() - assert bml[4][1] == (True, 9, 'CIFTI_STRUCTURE_CORTEX_RIGHT') + assert bml[4][1] == ('CIFTI_MODEL_TYPE_SURFACE', 9, 'CIFTI_STRUCTURE_CORTEX_RIGHT') assert len(bml[4]) == 3 assert (bml[4].voxel == -1).all() assert (bml[4].vertex == [2, 9, 14]).all() From fd8593eb57abbcf64a21e3bf59ed4c97a8967c5a Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 26 Mar 2019 17:37:33 +0000 Subject: [PATCH 059/689] RF: renamed is_surface to surface_mask and added volume_mask --- nibabel/cifti2/cifti2_axes.py | 29 ++++++++++++++-------- nibabel/cifti2/tests/test_axes.py | 1 + nibabel/cifti2/tests/test_cifti2io_axes.py | 4 +-- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 1cbebb1e7c..734aa1bb0e 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -284,8 +284,8 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, if name not in self.name: del self.nvertices[name] - is_surface = self.is_surface - if is_surface.all(): + surface_mask = self.surface_mask + if surface_mask.all(): self.affine = None self.volume_shape = None else: @@ -295,9 +295,9 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, self.affine = affine self.volume_shape = volume_shape - if np.any(self.vertex[is_surface] < 0): + if np.any(self.vertex[surface_mask] < 0): raise ValueError('Undefined vertex indices found for surface elements') - if np.any(self.voxel[~is_surface] < 0): + if np.any(self.voxel[~surface_mask] < 0): raise ValueError('Undefined voxel indices found for volumetric elements') for check_name in ('name', 'voxel', 'vertex'): @@ -523,12 +523,19 @@ def to_cifti_brain_structure_name(name): return proposed_name @property - def is_surface(self): + def surface_mask(self): """ (N, ) boolean array which is true for any element on the surface """ return np.vectorize(lambda name: name in self.nvertices.keys())(self.name) + @property + def volume_mask(self): + """ + (N, ) boolean array which is true for any element on the surface + """ + return np.vectorize(lambda name: name not in self.nvertices.keys())(self.name) + _affine = None @property @@ -586,13 +593,13 @@ def __eq__(self, other): if xor(self.affine is None, other.affine is None): 
return False return ( - (self.affine is None or + (self.affine is None or np.allclose(self.affine, other.affine) and self.volume_shape == other.volume_shape) and - self.nvertices == other.nvertices and - np.array_equal(self.name, other.name) and - np.array_equal(self.voxel[~self.is_surface], other.voxel[~other.is_surface]) and - np.array_equal(self.vertex[self.is_surface], other.vertex[other.is_surface]) + self.nvertices == other.nvertices and + np.array_equal(self.name, other.name) and + np.array_equal(self.voxel[~self.surface_mask], other.voxel[~other.surface_mask]) and + np.array_equal(self.vertex[self.surface_mask], other.vertex[other.surface_mask]) ) def __add__(self, other): @@ -763,7 +770,7 @@ def from_brain_models(cls, named_brain_models): for idx_parcel, (parcel_name, bm) in enumerate(named_brain_models): all_names.append(parcel_name) - voxels = bm.voxel[~bm.is_surface] + voxels = bm.voxel[~bm.surface_mask] if voxels.shape[0] != 0: if affine is None: affine = bm.affine diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index b3ec45ebc4..150bb88300 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -127,6 +127,7 @@ def test_brain_models(): for bm, label, is_surface in zip(bml, ['ThalamusRight', 'Other', 'cortex_left', 'Other'], (False, False, True, True)): + assert np.all(bm.surface_mask == ~bm.volume_mask) structures = list(bm.iter_structures()) assert len(structures) == 1 name = structures[0][0] diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index fee3605ce4..e1025710ff 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -66,9 +66,9 @@ def check_Conte69(brain_model): structures = list(brain_model.iter_structures()) assert len(structures) == 2 assert structures[0][0] == 'CIFTI_STRUCTURE_CORTEX_LEFT' - assert structures[0][2].is_surface.all() + assert structures[0][2].surface_mask.all() assert structures[1][0] == 'CIFTI_STRUCTURE_CORTEX_RIGHT' - assert structures[1][2].is_surface.all() + assert structures[1][2].surface_mask.all() assert (brain_model.voxel == -1).all() assert (brain_model.vertex[:5] == np.arange(5)).all() From cdf57dba3be985c60f0b388f2640edc8a534325f Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Tue, 26 Mar 2019 21:36:05 +0000 Subject: [PATCH 060/689] STY: fixed indentation --- nibabel/cifti2/cifti2_axes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 734aa1bb0e..e3008518f6 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -594,8 +594,8 @@ def __eq__(self, other): return False return ( (self.affine is None or - np.allclose(self.affine, other.affine) and - self.volume_shape == other.volume_shape) and + np.allclose(self.affine, other.affine) and + self.volume_shape == other.volume_shape) and self.nvertices == other.nvertices and np.array_equal(self.name, other.name) and np.array_equal(self.voxel[~self.surface_mask], other.voxel[~other.surface_mask]) and From b51d5f114879a0b696912fdd6be1a1185a38b88f Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 10:42:45 +0000 Subject: [PATCH 061/689] DOC: consistently use CIFTI-2 instead of CIfTI2 or CIFTI2 --- nibabel/cifti2/__init__.py | 2 +- nibabel/cifti2/cifti2.py | 36 ++++---- nibabel/cifti2/cifti2_axes.py | 90 ++++++++++---------- nibabel/cifti2/parse_cifti2.py | 35 ++++---- 
nibabel/cifti2/tests/test_axes.py | 14 +-- nibabel/cifti2/tests/test_cifti2.py | 2 +- nibabel/cifti2/tests/test_cifti2io_header.py | 2 +- nibabel/cifti2/tests/test_name.py | 2 +- nibabel/cifti2/tests/test_new_cifti2.py | 2 +- 9 files changed, 93 insertions(+), 92 deletions(-) diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index 071f76bf58..dc88a24b48 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -"""CIfTI2 format IO +"""CIFTI-2 format IO .. currentmodule:: nibabel.cifti2 diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index ddbc0550cd..7ca5584bb1 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -6,13 +6,13 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to CIfTI2 image format +''' Read / write access to CIFTI-2 image format Format of the NIFTI2 container format described here: http://www.nitrc.org/forum/message.php?msg_id=3738 -Definition of the CIFTI2 header format and file extensions can be found at: +Definition of the CIFTI-2 header format and file extensions can be found at: http://www.nitrc.org/projects/cifti ''' @@ -39,7 +39,7 @@ def _float_01(val): class Cifti2HeaderError(Exception): - """ Error in CIFTI2 header + """ Error in CIFTI-2 header """ @@ -175,7 +175,7 @@ def _to_xml_element(self): class Cifti2LabelTable(xml.XmlSerializable, MutableMapping): - """ CIFTI2 label table: a sequence of ``Cifti2Label``s + """ CIFTI-2 label table: a sequence of ``Cifti2Label``s * Description - Used by NamedMap when IndicesMapToDataType is "CIFTI_INDEX_TYPE_LABELS" in order to associate names and display colors @@ -233,7 +233,7 @@ def _to_xml_element(self): class Cifti2Label(xml.XmlSerializable): - """ CIFTI2 label: association of integer key with a name and RGBA values + """ CIFTI-2 label: association of integer key with a name and RGBA values For all color components, value is floating point with range 0.0 to 1.0. @@ -311,7 +311,7 @@ def _to_xml_element(self): class Cifti2NamedMap(xml.XmlSerializable): - """CIFTI2 named map: association of name and optional data with a map index + """CIFTI-2 named map: association of name and optional data with a map index Associates a name, optional metadata, and possibly a LabelTable with an index in a map. @@ -429,7 +429,7 @@ def _to_xml_element(self): class Cifti2VoxelIndicesIJK(xml.XmlSerializable, MutableSequence): - """CIFTI2 VoxelIndicesIJK: Set of voxel indices contained in a structure + """CIFTI-2 VoxelIndicesIJK: Set of voxel indices contained in a structure * Description - Identifies the voxels that model a brain structure, or participate in a parcel. Note that when this is a child of BrainModel, @@ -511,7 +511,7 @@ def _to_xml_element(self): class Cifti2Vertices(xml.XmlSerializable, MutableSequence): - """CIFTI2 vertices - association of brain structure and a list of vertices + """CIFTI-2 vertices - association of brain structure and a list of vertices * Description - Contains a BrainStructure type and a list of vertex indices within a Parcel. @@ -577,7 +577,7 @@ def _to_xml_element(self): class Cifti2Parcel(xml.XmlSerializable): - """CIFTI2 parcel: association of a name with vertices and/or voxels + """CIFTI-2 parcel: association of a name with vertices and/or voxels * Description - Associates a name, plus vertices and/or voxels, with an index. 
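For reference, the parcel-related classes documented here can be assembled directly; a minimal
sketch reusing only constructor calls that appear in ``Parcels.to_mapping`` earlier in this
series (the parcel name and indices are illustrative):

>>> from nibabel.cifti2 import cifti2
>>> voxels = cifti2.Cifti2VoxelIndicesIJK([[0, 1, 2], [3, 4, 5]])  # two voxels as (i, j, k)
>>> parcel = cifti2.Cifti2Parcel('example_parcel', voxels)
>>> parcel.vertices.append(cifti2.Cifti2Vertices('CIFTI_STRUCTURE_CORTEX_LEFT', [0, 5, 10]))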
@@ -692,7 +692,7 @@ def _to_xml_element(self): class Cifti2Volume(xml.XmlSerializable): - """CIFTI2 volume: information about a volume for mappings that use voxels + """CIFTI-2 volume: information about a volume for mappings that use voxels * Description - Provides information about the volume for any mappings that use voxels. @@ -735,7 +735,7 @@ def _to_xml_element(self): class Cifti2VertexIndices(xml.XmlSerializable, MutableSequence): - """CIFTI2 vertex indices: vertex indices for an associated brain model + """CIFTI-2 vertex indices: vertex indices for an associated brain model The vertex indices (which are independent for each surface, and zero-based) that are used in this brain model[.] The parent @@ -1078,7 +1078,7 @@ def _to_xml_element(self): class Cifti2Matrix(xml.XmlSerializable, MutableSequence): - """ CIFTI2 Matrix object + """ CIFTI-2 Matrix object This is a list-like container where the elements are instances of :class:`Cifti2MatrixIndicesMap`. @@ -1210,7 +1210,7 @@ def _to_xml_element(self): class Cifti2Header(FileBasedHeader, xml.XmlSerializable): - ''' Class for CIFTI2 header extension ''' + ''' Class for CIFTI-2 header extension ''' def __init__(self, matrix=None, version="2.0"): FileBasedHeader.__init__(self) @@ -1301,7 +1301,7 @@ def from_axes(cls, axes): class Cifti2Image(DataobjImage): - """ Class for single file CIFTI2 format image + """ Class for single file CIFTI-2 format image """ header_class = Cifti2Header valid_exts = Nifti2Image.valid_exts @@ -1329,7 +1329,7 @@ def __init__(self, returns an array from ``np.asanyarray``. It should have a ``shape`` attribute or property. header : Cifti2Header instance or Sequence[cifti2_axes.Axis] - Header with data for / from XML part of CIFTI2 format. + Header with data for / from XML part of CIFTI-2 format. Alternatively a sequence of cifti2_axes.Axis objects can be provided describing each dimension of the array. nifti_header : None or mapping or NIfTI2 header instance, optional @@ -1356,7 +1356,7 @@ def nifti_header(self): @classmethod def from_file_map(klass, file_map): - """ Load a CIFTI2 image from a file_map + """ Load a CIFTI-2 image from a file_map Parameters ---------- @@ -1376,7 +1376,7 @@ def from_file_map(klass, file_map): cifti_header = item.get_content() break else: - raise ValueError('NIfTI2 header does not contain a CIFTI2 ' + raise ValueError('NIfTI2 header does not contain a CIFTI-2 ' 'extension') # Construct cifti image. @@ -1435,7 +1435,7 @@ def to_file_map(self, file_map=None): img.to_file_map(file_map or self.file_map) def update_headers(self): - ''' Harmonize CIFTI2 and NIfTI headers with image data + ''' Harmonize CIFTI-2 and NIfTI headers with image data >>> import numpy as np >>> data = np.zeros((2,3,4)) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e3008518f6..e0f8c0f7ea 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -1,8 +1,8 @@ """ -Defines :class:`Axis` objects to create, read, and manipulate CIfTI2 files +Defines :class:`Axis` objects to create, read, and manipulate CIFTI-2 files -These axes provide an alternative interface to the information in the CIFTI2 header. -Each type of CIfTI2 axes describing the rows/columns in a CIfTI2 matrix is given a unique class: +These axes provide an alternative interface to the information in the CIFTI-2 header. 
+Each type of CIFTI-2 axes describing the rows/columns in a CIFTI-2 matrix is given a unique class: * :class:`BrainModel`: each row/column is a voxel or vertex * :class:`Parcels`: each row/column is a group of voxels and/or vertices @@ -12,22 +12,22 @@ All of these classes are derived from the :class:`Axis` class. -After loading a CIfTI2 file a tuple of axes describing the rows and columns can be obtained +After loading a CIFTI-2 file a tuple of axes describing the rows and columns can be obtained from the :meth:`.cifti2.Cifti2Header.get_axis` method on the header object (e.g. ``nibabel.load().header.get_axis()``). Inversely, a new :class:`.cifti2.Cifti2Header` object can be created from existing :class:`Axis` objects using the :meth:`.cifti2.Cifti2Header.from_axes` factory method. -CIfTI2 :class:`Axis` objects of the same type can be concatenated using the '+'-operator. +CIFTI-2 :class:`Axis` objects of the same type can be concatenated using the '+'-operator. Numpy indexing also works on axes (except for Series objects, which have to remain monotonically increasing or decreasing). -Creating new CIfTI2 axes +Creating new CIFTI-2 axes ----------------------- New :class:`Axis` objects can be constructed by providing a description for what is contained in each row/column of the described tensor. For each :class:`Axis` sub-class this descriptor is: -* :class:`BrainModel`: a CIfTI2 structure name and a voxel or vertex index +* :class:`BrainModel`: a CIFTI-2 structure name and a voxel or vertex index * :class:`Parcels`: a name and a sequence of voxel and vertex indices * :class:`Scalar`: a name and optionally a dict of meta-data * :class:`Label`: a name, dict of label index to name and colour, @@ -55,7 +55,7 @@ >>> bm_thal = cifti2.BrainModel.from_mask(thalamus_mask, affine=affine, ... brain_structure='thalamus_left') # doctest: +SKIP -Brain structure names automatically get converted to valid CIfTI2 indentifiers using +Brain structure names automatically get converted to valid CIFTI-2 identifiers using :meth:`BrainModel.to_cifti_brain_structure_name`. A 1-dimensional mask will be automatically interpreted as a surface element and a 3-dimensional mask as a volume element. @@ -116,7 +116,7 @@ def from_index_mapping(mim): """ - Parses the MatrixIndicesMap to find the appropriate CIfTI2 axis describing the rows or columns + Parses the MatrixIndicesMap to find the appropriate CIFTI-2 axis describing the rows or columns Parameters ---------- @@ -136,7 +136,7 @@ def from_index_mapping(mim): def to_header(axes): """ - Converts the axes describing the rows/columns of a CIfTI2 vector/matrix to a Cifti2Header + Converts the axes describing the rows/columns of a CIFTI-2 vector/matrix to a Cifti2Header Parameters ---------- @@ -165,7 +165,7 @@ def to_header(axes): @add_metaclass(abc.ABCMeta) class Axis(object): """ - Abstract class for any object describing the rows or columns of a CIfTI2 vector/matrix + Abstract class for any object describing the rows or columns of a CIFTI-2 vector/matrix Mainly used for type checking. """ @@ -220,7 +220,7 @@ def __getitem__(self, item): class BrainModel(Axis): """ - Each row/column in the CIfTI2 vector/matrix represents a single vertex or voxel + Each row/column in the CIFTI-2 vector/matrix represents a single vertex or voxel This Axis describes which vertex/voxel is represented by each row/column.
""" @@ -243,18 +243,18 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, name : str or np.ndarray brain structure name or (N, ) string array with the brain structure names voxel : np.ndarray - (N, 3) array with the voxel indices (can be omitted for CIfTI2 files only + (N, 3) array with the voxel indices (can be omitted for CIFTI-2 files only covering the surface) vertex : np.ndarray - (N, ) array with the vertex indices (can be omitted for volumetric CIfTI2 files) + (N, ) array with the vertex indices (can be omitted for volumetric CIFTI-2 files) affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIfTI2 files only + (4, 4) array mapping voxel indices to mm space (not needed for CIFTI-2 files only covering the surface) volume_shape : tuple of three integers - shape of the volume in which the voxels were defined (not needed for CIfTI2 files only + shape of the volume in which the voxels were defined (not needed for CIFTI-2 files only covering the surface) nvertices : dict from string to integer - maps names of surface elements to integers (not needed for volumetric CIfTI2 files) + maps names of surface elements to integers (not needed for volumetric CIFTI-2 files) """ if voxel is None: if vertex is None: @@ -366,7 +366,7 @@ def from_surface(cls, vertices, nvertex, name='Other'): @classmethod def from_index_mapping(cls, mim): """ - Creates a new BrainModel axis based on a CIfTI2 dataset + Creates a new BrainModel axis based on a CIFTI-2 dataset Parameters ---------- @@ -399,12 +399,12 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the brain model axis to a MatrixIndicesMap for storage in CIfTI2 format + Converts the brain model axis to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- dim : int - which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) + which dimension of the CIFTI-2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -439,7 +439,7 @@ def iter_structures(self): Yields ------ tuple with 3 elements: - - CIfTI2 brain structure name + - CIFTI-2 brain structure name - slice to select the data associated with the brain structure from the tensor - brain model covering that specific brain structure """ @@ -455,11 +455,11 @@ def iter_structures(self): @staticmethod def to_cifti_brain_structure_name(name): """ - Attempts to convert the name of an anatomical region in a format recognized by CIfTI2 + Attempts to convert the name of an anatomical region in a format recognized by CIFTI-2 This function returns: - - the name if it is in the CIfTI2 format already + - the name if it is in the CIFTI-2 format already - if the name is a tuple the first element is assumed to be the structure name while the second is assumed to be the hemisphere (left, right or both). The latter will default to both. 
@@ -476,11 +476,11 @@ def to_cifti_brain_structure_name(name): Returns ------- - CIfTI2 compatible name + CIFTI-2 compatible name Raises ------ - ValueError: raised if the input name does not match a known anatomical structure in CIfTI2 + ValueError: raised if the input name does not match a known anatomical structure in CIFTI-2 """ if name in cifti2.CIFTI_BRAIN_STRUCTURES: return name @@ -690,7 +690,7 @@ def get_element(self, index): class Parcels(Axis): """ - Each row/column in the CIfTI2 vector/matrix represents a parcel of voxels/vertices + Each row/column in the CIFTI-2 vector/matrix represents a parcel of voxels/vertices This Axis describes which parcel is represented by each row/column. @@ -715,13 +715,13 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert For each parcel the vertices are represented by a mapping from brain structure name to (M, ) index array affine : np.ndarray - (4, 4) array mapping voxel indices to mm space (not needed for CIfTI2 files only + (4, 4) array mapping voxel indices to mm space (not needed for CIFTI-2 files only covering the surface) volume_shape : tuple of three integers - shape of the volume in which the voxels were defined (not needed for CIfTI2 files only + shape of the volume in which the voxels were defined (not needed for CIFTI-2 files only covering the surface) nvertices : dict[String -> int] - maps names of surface elements to integers (not needed for volumetric CIfTI2 files) + maps names of surface elements to integers (not needed for volumetric CIFTI-2 files) """ self.name = np.asanyarray(name, dtype='U') as_array = np.asanyarray(voxels) @@ -794,7 +794,7 @@ def from_brain_models(cls, named_brain_models): @classmethod def from_index_mapping(cls, mim): """ - Creates a new Parcels axis based on a CIfTI2 dataset + Creates a new Parcels axis based on a CIFTI-2 dataset Parameters ---------- @@ -835,12 +835,12 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the Parcel to a MatrixIndicesMap for storage in CIfTI2 format + Converts the Parcel to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- dim : int - which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) + which dimension of the CIFTI-2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -1000,7 +1000,7 @@ def get_element(self, index): class Scalar(Axis): """ - Along this axis of the CIfTI2 vector/matrix each row/column has been given + Along this axis of the CIFTI-2 vector/matrix each row/column has been given a unique name and optionally metadata """ @@ -1027,7 +1027,7 @@ def __init__(self, name, meta=None): @classmethod def from_index_mapping(cls, mim): """ - Creates a new Scalar axis based on a CIfTI2 dataset + Creates a new Scalar axis based on a CIFTI-2 dataset Parameters ---------- @@ -1043,12 +1043,12 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the hcp_labels to a MatrixIndicesMap for storage in CIfTI2 format + Converts the hcp_labels to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- dim : int - which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) + which dimension of the CIFTI-2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -1126,9 +1126,9 @@ def get_element(self, index): class Label(Axis): """ - Defines CIfTI2 axis for label array. + Defines CIFTI-2 axis for label array. 
- Along this axis of the CIfTI2 vector/matrix each row/column has been given a unique name, + Along this axis of the CIFTI-2 vector/matrix each row/column has been given a unique name, label table, and optionally metadata """ @@ -1161,7 +1161,7 @@ def __init__(self, name, label, meta=None): @classmethod def from_index_mapping(cls, mim): """ - Creates a new Label axis based on a CIfTI2 dataset + Creates a new Label axis based on a CIFTI-2 dataset Parameters ---------- @@ -1178,12 +1178,12 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the hcp_labels to a MatrixIndicesMap for storage in CIfTI2 format + Converts the hcp_labels to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- dim : int - which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) + which dimension of the CIFTI-2 vector/matrix is described by this dataset (zero-based) Returns ------- @@ -1272,7 +1272,7 @@ def get_element(self, index): class Series(Axis): """ - Along this axis of the CIfTI2 vector/matrix the rows/columns increase monotonously in time + Along this axis of the CIFTI-2 vector/matrix the rows/columns increase monotonically in time This Axis describes the time point of each row/column. """ @@ -1305,7 +1305,7 @@ def time(self): @classmethod def from_index_mapping(cls, mim): """ - Creates a new Series axis based on a CIfTI2 dataset + Creates a new Series axis based on a CIFTI-2 dataset Parameters ---------- @@ -1321,12 +1321,12 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the Series to a MatrixIndicesMap for storage in CIfTI2 format + Converts the Series to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- dim : int - which dimension of the CIfTI2 vector/matrix is described by this dataset (zero-based) + which dimension of the CIFTI-2 vector/matrix is described by this dataset (zero-based) Returns ------- diff --git a/nibabel/cifti2/parse_cifti2.py b/nibabel/cifti2/parse_cifti2.py index f0df76ac7d..608636a446 100644 --- a/nibabel/cifti2/parse_cifti2.py +++ b/nibabel/cifti2/parse_cifti2.py @@ -94,7 +94,7 @@ def may_contain_header(klass, binaryblock): @staticmethod def _chk_qfac(hdr, fix=False): - # Allow qfac of 0 without complaint for CIFTI2 + # Allow qfac of 0 without complaint for CIFTI-2 rep = Report(HeaderDataError) if hdr['pixdim'][0] in (-1, 0, 1): return hdr, rep @@ -127,7 +127,7 @@ class _Cifti2AsNiftiImage(Nifti2Image): class Cifti2Parser(xml.XmlParser): - '''Class to parse an XML string into a CIFTI2 header object''' + '''Class to parse an XML string into a CIFTI-2 header object''' def __init__(self, encoding=None, buffer_size=3500000, verbose=0): super(Cifti2Parser, self).__init__(encoding=encoding, buffer_size=buffer_size, @@ -164,7 +164,7 @@ def StartElementHandler(self, name, attrs): parent = self.struct_state[-1] if not isinstance(parent, Cifti2Header): raise Cifti2HeaderError( - 'Matrix element can only be a child of the CIFTI2 Header element' + 'Matrix element can only be a child of the CIFTI-2 Header element' ) parent.matrix = matrix self.struct_state.append(matrix) @@ -175,7 +175,8 @@ def StartElementHandler(self, name, attrs): parent = self.struct_state[-1] if not isinstance(parent, (Cifti2Matrix, Cifti2NamedMap)): raise Cifti2HeaderError( - 'MetaData element can only be a child of the CIFTI2 Matrix or NamedMap elements' + 'MetaData element can only be a child of the CIFTI-2 Matrix ' + 'or NamedMap elements' ) self.struct_state.append(meta) @@ -207,7 +208,7 @@ def
StartElementHandler(self, name, attrs): matrix = self.struct_state[-1] if not isinstance(matrix, Cifti2Matrix): raise Cifti2HeaderError( - 'MatrixIndicesMap element can only be a child of the CIFTI2 Matrix element' + 'MatrixIndicesMap element can only be a child of the CIFTI-2 Matrix element' ) matrix.append(mim) self.struct_state.append(mim) @@ -218,7 +219,7 @@ def StartElementHandler(self, name, attrs): mim = self.struct_state[-1] if not isinstance(mim, Cifti2MatrixIndicesMap): raise Cifti2HeaderError( - 'NamedMap element can only be a child of the CIFTI2 MatrixIndicesMap element' + 'NamedMap element can only be a child of the CIFTI-2 MatrixIndicesMap element' ) self.struct_state.append(named_map) mim.append(named_map) @@ -234,7 +235,7 @@ def StartElementHandler(self, name, attrs): lata = Cifti2LabelTable() if not isinstance(named_map, Cifti2NamedMap): raise Cifti2HeaderError( - 'LabelTable element can only be a child of the CIFTI2 NamedMap element' + 'LabelTable element can only be a child of the CIFTI-2 NamedMap element' ) self.fsm_state.append('LabelTable') self.struct_state.append(lata) @@ -244,7 +245,7 @@ def StartElementHandler(self, name, attrs): lata = self.struct_state[-1] if not isinstance(lata, Cifti2LabelTable): raise Cifti2HeaderError( - 'Label element can only be a child of the CIFTI2 LabelTable element' + 'Label element can only be a child of the CIFTI-2 LabelTable element' ) label = Cifti2Label() label.key = int(attrs["Key"]) @@ -260,7 +261,7 @@ def StartElementHandler(self, name, attrs): named_map = self.struct_state[-1] if not isinstance(named_map, Cifti2NamedMap): raise Cifti2HeaderError( - 'MapName element can only be a child of the CIFTI2 NamedMap element' + 'MapName element can only be a child of the CIFTI-2 NamedMap element' ) self.fsm_state.append('MapName') @@ -271,7 +272,7 @@ def StartElementHandler(self, name, attrs): mim = self.struct_state[-1] if not isinstance(mim, Cifti2MatrixIndicesMap): raise Cifti2HeaderError( - 'Surface element can only be a child of the CIFTI2 MatrixIndicesMap element' + 'Surface element can only be a child of the CIFTI-2 MatrixIndicesMap element' ) if mim.indices_map_to_data_type != "CIFTI_INDEX_TYPE_PARCELS": raise Cifti2HeaderError( @@ -287,7 +288,7 @@ def StartElementHandler(self, name, attrs): mim = self.struct_state[-1] if not isinstance(mim, Cifti2MatrixIndicesMap): raise Cifti2HeaderError( - 'Parcel element can only be a child of the CIFTI2 MatrixIndicesMap element' + 'Parcel element can only be a child of the CIFTI-2 MatrixIndicesMap element' ) parcel.name = attrs["Name"] mim.append(parcel) @@ -299,7 +300,7 @@ def StartElementHandler(self, name, attrs): parcel = self.struct_state[-1] if not isinstance(parcel, Cifti2Parcel): raise Cifti2HeaderError( - 'Vertices element can only be a child of the CIFTI2 Parcel element' + 'Vertices element can only be a child of the CIFTI-2 Parcel element' ) vertices.brain_structure = attrs["BrainStructure"] if vertices.brain_structure not in CIFTI_BRAIN_STRUCTURES: @@ -315,7 +316,7 @@ def StartElementHandler(self, name, attrs): parent = self.struct_state[-1] if not isinstance(parent, (Cifti2Parcel, Cifti2BrainModel)): raise Cifti2HeaderError( - 'VoxelIndicesIJK element can only be a child of the CIFTI2 ' + 'VoxelIndicesIJK element can only be a child of the CIFTI-2 ' 'Parcel or BrainModel elements' ) parent.voxel_indices_ijk = Cifti2VoxelIndicesIJK() @@ -325,7 +326,7 @@ def StartElementHandler(self, name, attrs): mim = self.struct_state[-1] if not isinstance(mim, Cifti2MatrixIndicesMap): raise 
Cifti2HeaderError( - 'Volume element can only be a child of the CIFTI2 MatrixIndicesMap element' + 'Volume element can only be a child of the CIFTI-2 MatrixIndicesMap element' ) dimensions = tuple([int(val) for val in attrs["VolumeDimensions"].split(',')]) @@ -339,7 +340,7 @@ def StartElementHandler(self, name, attrs): if not isinstance(volume, Cifti2Volume): raise Cifti2HeaderError( 'TransformationMatrixVoxelIndicesIJKtoXYZ element can only be a child ' - 'of the CIFTI2 Volume element' + 'of the CIFTI-2 Volume element' ) transform = Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ() transform.meter_exponent = int(attrs["MeterExponent"]) @@ -354,7 +355,7 @@ def StartElementHandler(self, name, attrs): if not isinstance(mim, Cifti2MatrixIndicesMap): raise Cifti2HeaderError( 'BrainModel element can only be a child ' - 'of the CIFTI2 MatrixIndicesMap element' + 'of the CIFTI-2 MatrixIndicesMap element' ) if mim.indices_map_to_data_type != "CIFTI_INDEX_TYPE_BRAIN_MODELS": raise Cifti2HeaderError( @@ -386,7 +387,7 @@ def StartElementHandler(self, name, attrs): if not isinstance(model, Cifti2BrainModel): raise Cifti2HeaderError( 'VertexIndices element can only be a child ' - 'of the CIFTI2 BrainModel element' + 'of the CIFTI-2 BrainModel element' ) self.fsm_state.append('VertexIndices') model.vertex_indices = index diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 150bb88300..cd7682408d 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -101,7 +101,7 @@ def get_axes(): def test_brain_models(): """ - Tests the introspection and creation of CIFTI2 BrainModel axes + Tests the introspection and creation of CIFTI-2 BrainModel axes """ bml = list(get_brain_models()) assert len(bml[0]) == 3 @@ -306,7 +306,7 @@ def test_brain_models(): def test_parcels(): """ - Test the introspection and creation of CIFTI2 Parcel axes + Test the introspection and creation of CIFTI-2 Parcel axes """ prc = get_parcels() assert isinstance(prc, axes.Parcels) @@ -446,7 +446,7 @@ def test_parcels(): def test_scalar(): """ - Test the introspection and creation of CIFTI2 Scalar axes + Test the introspection and creation of CIFTI-2 Scalar axes """ sc = get_scalar() assert len(sc) == 3 @@ -494,7 +494,7 @@ def test_scalar(): def test_label(): """ - Test the introspection and creation of CIFTI2 Scalar axes + Test the introspection and creation of CIFTI-2 Scalar axes """ lab = get_label() assert len(lab) == 3 @@ -549,7 +549,7 @@ def test_label(): def test_series(): """ - Test the introspection and creation of CIFTI2 Series axes + Test the introspection and creation of CIFTI-2 Series axes """ sr = list(get_series()) assert sr[0].unit == 'SECOND' @@ -618,7 +618,7 @@ def test_series(): def test_writing(): """ - Tests the writing and reading back in of custom created CIFTI2 axes + Tests the writing and reading back in of custom created CIFTI-2 axes """ for ax1 in get_axes(): for ax2 in get_axes(): @@ -628,7 +628,7 @@ def test_writing(): def test_common_interface(): """ - Tests the common interface for all custom created CIFTI2 axes + Tests the common interface for all custom created CIFTI-2 axes """ for axis1, axis2 in zip(get_axes(), get_axes()): assert axis1 == axis2 diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index ce71b92bcc..6054c126b0 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -1,4 +1,4 @@ -""" Testing CIFTI2 objects +""" Testing CIFTI-2 objects """ import 
collections from xml.etree import ElementTree diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 521e112847..e4970625a4 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -43,7 +43,7 @@ def test_read_nifti2(): - # Error trying to read a CIFTI2 image from a NIfTI2-only image. + # Error trying to read a CIFTI-2 image from a NIfTI2-only image. filemap = ci.Cifti2Image.make_file_map() for k in filemap: filemap[k].fileobj = io.open(NIFTI2_DATA) diff --git a/nibabel/cifti2/tests/test_name.py b/nibabel/cifti2/tests/test_name.py index a73c5e8c46..b656f88875 100644 --- a/nibabel/cifti2/tests/test_name.py +++ b/nibabel/cifti2/tests/test_name.py @@ -10,7 +10,7 @@ def test_name_conversion(): """ - Tests the automatic name conversion to a format recognized by CIFTI2 + Tests the automatic name conversion to a format recognized by CIFTI-2 """ func = cifti2_axes.BrainModel.to_cifti_brain_structure_name for base_name, input_names in equivalents: diff --git a/nibabel/cifti2/tests/test_new_cifti2.py b/nibabel/cifti2/tests/test_new_cifti2.py index 9ead1e3088..01bc742a22 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -1,4 +1,4 @@ -"""Tests the generation of new CIFTI2 files from scratch +"""Tests the generation of new CIFTI-2 files from scratch Contains a series of functions to create and check each of the 5 CIFTI index types (i.e. BRAIN_MODELS, PARCELS, SCALARS, LABELS, and SERIES). From 532bed968be43a853298e3f5ab432684ba0ab586 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 10:53:20 +0000 Subject: [PATCH 062/689] RF: appended Axis to the different axes classes --- nibabel/cifti2/__init__.py | 2 +- nibabel/cifti2/cifti2_axes.py | 205 +++++++++++---------- nibabel/cifti2/tests/test_axes.py | 138 +++++++------- nibabel/cifti2/tests/test_cifti2io_axes.py | 20 +- nibabel/cifti2/tests/test_name.py | 2 +- 5 files changed, 184 insertions(+), 183 deletions(-) diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index dc88a24b48..9dc6dd68b8 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -26,4 +26,4 @@ Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ, Cifti2Vertices, Cifti2Volume, CIFTI_BRAIN_STRUCTURES, Cifti2HeaderError, CIFTI_MODEL_TYPES, load, save) -from .cifti2_axes import (Axis, BrainModel, Parcels, Series, Label, Scalar) \ No newline at end of file +from .cifti2_axes import (Axis, BrainModelAxis, ParcelsAxis, SeriesAxis, LabelAxis, ScalarAxis) \ No newline at end of file diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e0f8c0f7ea..90d3c6f4c4 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -4,11 +4,11 @@ These axes provide an alternative interface to the information in the CIFTI-2 header. 
Each type of CIFTI-2 axes describing the rows/columns in a CIFTI-2 matrix is given a unique class: -* :class:`BrainModel`: each row/column is a voxel or vertex -* :class:`Parcels`: each row/column is a group of voxels and/or vertices -* :class:`Scalar`: each row/column has a unique name (with optional meta-data) -* :class:`Label`: each row/column has a unique name and label table (with optional meta-data) -* :class:`Series`: each row/column is a timepoint, which increases monotonically +* :class:`BrainModelAxis`: each row/column is a voxel or vertex +* :class:`ParcelsAxis`: each row/column is a group of voxels and/or vertices +* :class:`ScalarAxis`: each row/column has a unique name (with optional meta-data) +* :class:`LabelAxis`: each row/column has a unique name and label table (with optional meta-data) +* :class:`SeriesAxis`: each row/column is a timepoint, which increases monotonically All of these classes are derived from the :class:`Axis` class. @@ -20,43 +20,43 @@ CIFTI-2 :class:`Axis` objects of the same type can be concatenated using the '+'-operator. Numpy indexing also works on axes -(except for Series objects, which have to remain monotonically increasing or decreasing). +(except for SeriesAxis objects, which have to remain monotonically increasing or decreasing). Creating new CIFTI-2 axes ----------------------- New :class:`Axis` objects can be constructed by providing a description for what is contained in each row/column of the described tensor. For each :class:`Axis` sub-class this descriptor is: -* :class:`BrainModel`: a CIFTI-2 structure name and a voxel or vertex index -* :class:`Parcels`: a name and a sequence of voxel and vertex indices -* :class:`Scalar`: a name and optionally a dict of meta-data -* :class:`Label`: a name, dict of label index to name and colour, +* :class:`BrainModelAxis`: a CIFTI-2 structure name and a voxel or vertex index +* :class:`ParcelsAxis`: a name and a sequence of voxel and vertex indices +* :class:`ScalarAxis`: a name and optionally a dict of meta-data +* :class:`LabelAxis`: a name, dict of label index to name and colour, and optionally a dict of meta-data -* :class:`Series`: the time-point of each row/column is set by setting the start, stop, size, +* :class:`SeriesAxis`: the time-point of each row/column is set by setting the start, stop, size, and unit of the time-series -Several helper functions exist to create new :class:`BrainModel` axes: +Several helper functions exist to create new :class:`BrainModelAxis` axes: -* :meth:`BrainModel.from_mask` creates a new BrainModel volume covering the +* :meth:`BrainModelAxis.from_mask` creates a new BrainModelAxis volume covering the non-zero values of a mask -* :meth:`BrainModel.from_surface` creates a new BrainModel surface covering the provided +* :meth:`BrainModelAxis.from_surface` creates a new BrainModelAxis surface covering the provided indices of a surface -A :class:`Parcels` axis can be created from a sequence of :class:`BrainModel` axes using -:meth:`Parcels.from_brain_models`. +A :class:`ParcelsAxis` axis can be created from a sequence of :class:`BrainModelAxis` axes using +:meth:`ParcelsAxis.from_brain_models`. Examples -------- We can create brain models covering the left cortex and left thalamus using: >>> from nibabel import cifti2 ->>> bm_cortex = cifti2.BrainModel.from_mask(cortex_mask, +>>> bm_cortex = cifti2.BrainModelAxis.from_mask(cortex_mask, ... 
brain_structure='cortex_left') # doctest: +SKIP ->>> bm_thal = cifti2.BrainModel.from_mask(thalamus_mask, affine=affine, +>>> bm_thal = cifti2.BrainModelAxis.from_mask(thalamus_mask, affine=affine, ... brain_structure='thalamus_left') # doctest: +SKIP Brain structure names automatically get converted to valid CIFTI-2 identifiers using -:meth:`BrainModel.to_cifti_brain_structure_name`. +:meth:`BrainModelAxis.to_cifti_brain_structure_name`. A 1-dimensional mask will be automatically interpreted as a surface element and a 3-dimensional mask as a volume element. @@ -83,9 +83,9 @@ and ('CIFTI_STRUCTURE_THALAMUS_LEFT', slice(, None), bm_thal) -Parcels can be constructed from selections of these brain models: +ParcelsAxis can be constructed from selections of these brain models: ->>> parcel = cifti2.Parcels.from_brain_models([ +>>> parcel = cifti2.ParcelsAxis.from_brain_models([ ... ('surface_parcel', bm_cortex[:100]), # contains first 100 cortical vertices ... ('volume_parcel', bm_thal), # contains thalamus ... ('combined_parcel', bm_full[[1, 8, 10, 120, 127]]) # contains selected voxels/vertices @@ -94,7 +94,7 @@ Time series are represented by their starting time (typically 0), step size (i.e. sampling time or TR), and number of elements: ->>> series = cifti2.Series(start=0, step=100, size=5000) # doctest: +SKIP +>>> series = cifti2.SeriesAxis(start=0, step=100, size=5000) # doctest: +SKIP So a header for fMRI data with a TR of 100 ms covering the left cortex and thalamus with 5000 timepoints could be created with @@ -104,7 +104,7 @@ Similarly the curvature and cortical thickness on the left cortex could be stored using a header like: ->>> cifti2.Cifti2Header.from_axes((cifti.Scalar(['curvature', 'thickness'], +>>> cifti2.Cifti2Header.from_axes((cifti2.ScalarAxis(['curvature', 'thickness']), ... bm_cortex)) # doctest: +SKIP """ import numpy as np @@ -126,11 +126,11 @@ def from_index_mapping(mim): ------- subtype of Axis """ - return_type = {'CIFTI_INDEX_TYPE_SCALARS': Scalar, - 'CIFTI_INDEX_TYPE_LABELS': Label, - 'CIFTI_INDEX_TYPE_SERIES': Series, - 'CIFTI_INDEX_TYPE_BRAIN_MODELS': BrainModel, - 'CIFTI_INDEX_TYPE_PARCELS': Parcels} + return_type = {'CIFTI_INDEX_TYPE_SCALARS': ScalarAxis, + 'CIFTI_INDEX_TYPE_LABELS': LabelAxis, + 'CIFTI_INDEX_TYPE_SERIES': SeriesAxis, + 'CIFTI_INDEX_TYPE_BRAIN_MODELS': BrainModelAxis, + 'CIFTI_INDEX_TYPE_PARCELS': ParcelsAxis} return return_type[mim.indices_map_to_data_type].from_index_mapping(mim) @@ -218,7 +218,7 @@ def __getitem__(self, item): pass -class BrainModel(Axis): +class BrainModelAxis(Axis): """ Each row/column in the CIFTI-2 vector/matrix represents a single vertex or voxel @@ -228,15 +228,15 @@ class BrainModel(Axis): def __init__(self, name, voxel=None, vertex=None, affine=None, volume_shape=None, nvertices=None): """ - New BrainModel axes can be constructed by passing on the greyordinate brain-structure + New BrainModelAxis axes can be constructed by passing on the greyordinate brain-structure names and voxel/vertex indices to the constructor or by one of the factory methods: - - :py:meth:`~BrainModel.from_mask`: creates surface or volumetric BrainModel axis + - :py:meth:`~BrainModelAxis.from_mask`: creates surface or volumetric BrainModelAxis axis from respectively 1D or 3D masks - - :py:meth:`~BrainModel.from_surface`: creates a surface BrainModel axis + - :py:meth:`~BrainModelAxis.from_surface`: creates a surface BrainModelAxis axis - The resulting BrainModel axes can be concatenated by adding them together.
+ The resulting BrainModelAxis axes can be concatenated by adding them together. Parameters ---------- @@ -291,7 +291,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, else: if affine is None or volume_shape is None: raise ValueError("Affine and volume shape should be defined " - "for BrainModel containing voxels") + "for BrainModelAxis containing voxels") self.affine = affine self.volume_shape = volume_shape @@ -303,18 +303,18 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, for check_name in ('name', 'voxel', 'vertex'): shape = (self.size, 3) if check_name == 'voxel' else (self.size, ) if getattr(self, check_name).shape != shape: - raise ValueError("Input {} has incorrect shape ({}) for BrainModel axis".format( + raise ValueError("Input {} has incorrect shape ({}) for BrainModelAxis axis".format( check_name, getattr(self, check_name).shape)) @classmethod def from_mask(cls, mask, name='other', affine=None): """ - Creates a new BrainModel axis describing the provided mask + Creates a new BrainModelAxis axis describing the provided mask Parameters ---------- mask : np.ndarray - all non-zero voxels will be included in the BrainModel axis + all non-zero voxels will be included in the BrainModelAxis axis should be (Nx, Ny, Nz) array for volume mask or (Nvertex, ) array for surface mask name : str Name of the brain structure (e.g. 'CortexRight', 'thalamus_left' or 'brain_stem') @@ -324,7 +324,7 @@ def from_mask(cls, mask, name='other', affine=None): Returns ------- - BrainModel which covers the provided mask + BrainModelAxis which covers the provided mask """ if affine is None: affine = np.eye(4) @@ -344,7 +344,7 @@ def from_mask(cls, mask, name='other', affine=None): @classmethod def from_surface(cls, vertices, nvertex, name='Other'): """ - Creates a new BrainModel axis describing the vertices on a surface + Creates a new BrainModelAxis axis describing the vertices on a surface Parameters ---------- @@ -357,7 +357,7 @@ def from_surface(cls, vertices, nvertex, name='Other'): Returns ------- - BrainModel which covers (part of) the surface + BrainModelAxis which covers (part of) the surface """ cifti_name = cls.to_cifti_brain_structure_name(name) return cls(cifti_name, vertex=vertices, @@ -374,7 +374,7 @@ def from_index_mapping(cls, mim): Returns ------- - BrainModel + BrainModelAxis """ nbm = sum(bm.index_count for bm in mim.brain_models) voxel = np.full((nbm, 3), fill_value=-1, dtype=int) @@ -588,7 +588,7 @@ def __len__(self): return self.name.size def __eq__(self, other): - if not isinstance(other, BrainModel) or len(self) != len(other): + if not isinstance(other, BrainModelAxis) or len(self) != len(other): return False if xor(self.affine is None, other.affine is None): return False @@ -608,14 +608,14 @@ def __add__(self, other): Parameters ---------- - other : BrainModel + other : BrainModelAxis brain model to be appended to the current one Returns ------- - BrainModel + BrainModelAxis """ - if not isinstance(other, BrainModel): + if not isinstance(other, BrainModelAxis): return NotImplemented if self.affine is None: affine, shape = other.affine, other.volume_shape @@ -656,12 +656,12 @@ def __getitem__(self, item): - vertex index if it is a surface element, otherwise array with 3 voxel indices - structure.BrainStructure object describing the brain structure the element was taken from - Otherwise returns a new BrainModel + Otherwise returns a new BrainModelAxis """ if isinstance(item, integer_types): return self.get_element(item) if isinstance(item, 
string_types): - raise IndexError("Can not index an Axis with a string (except for Parcels)") + raise IndexError("Can not index an Axis with a string (except for ParcelsAxis)") return self.__class__(self.name[item], self.voxel[item], self.vertex[item], self.affine, self.volume_shape, self.nvertices) @@ -688,7 +688,7 @@ def get_element(self, index): return element_type, struct[index], self.name[index] -class Parcels(Axis): +class ParcelsAxis(Axis): """ Each row/column in the CIFTI-2 vector/matrix represents a parcel of voxels/vertices @@ -700,8 +700,9 @@ class Parcels(Axis): def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvertices=None): """ - Use of this constructor is not recommended. New Parcels axes can be constructed more easily - from a sequence of BrainModel axes using :py:meth:`~Parcels.from_brain_models` + Use of this constructor is not recommended. New ParcelsAxis axes can be constructed more + easily from a sequence of BrainModelAxis axes using + :py:meth:`~ParcelsAxis.from_brain_models` Parameters ---------- @@ -738,7 +739,7 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert if nvertices is None: self.nvertices = {} else: - self.nvertices = {BrainModel.to_cifti_brain_structure_name(name): number + self.nvertices = {BrainModelAxis.to_cifti_brain_structure_name(name): number for name, number in nvertices.items()} for check_name in ('name', 'voxels', 'vertices'): @@ -749,16 +750,16 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert @classmethod def from_brain_models(cls, named_brain_models): """ - Creates a Parcel axis from a list of BrainModel axes with names + Creates a Parcel axis from a list of BrainModelAxis axes with names Parameters ---------- - named_brain_models : iterable of 2-element tuples of string and BrainModel + named_brain_models : iterable of 2-element tuples of string and BrainModelAxis list of (parcel name, brain model representation) pairs defining each parcel Returns ------- - Parcels + ParcelsAxis """ nparcels = len(named_brain_models) affine = None @@ -789,7 +790,7 @@ def from_brain_models(cls, named_brain_models): nvertices[name] = bm.nvertices[name] vertices[name] = bm_part.vertex all_vertices[idx_parcel] = vertices - return Parcels(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) + return ParcelsAxis(all_names, all_voxels, all_vertices, affine, volume_shape, nvertices) @classmethod def from_index_mapping(cls, mim): @@ -802,7 +803,7 @@ def from_index_mapping(cls, mim): Returns ------- - Parcels + ParcelsAxis """ nparcels = len(list(mim.parcels)) all_names = [] @@ -928,14 +929,14 @@ def __add__(self, other): Parameters ---------- - other : Parcels + other : ParcelsAxis parcel to be appended to the current one Returns ------- Parcel """ - if not isinstance(other, Parcels): + if not isinstance(other, ParcelsAxis): return NotImplemented if self.affine is None: affine, shape = other.affine, other.volume_shape @@ -943,12 +944,12 @@ def __add__(self, other): affine, shape = self.affine, self.volume_shape if other.affine is not None and (not np.allclose(other.affine, affine) or other.volume_shape != shape): - raise ValueError("Trying to concatenate two Parcels defined " + raise ValueError("Trying to concatenate two ParcelsAxis defined " "in a different brain volume") nvertices = dict(self.nvertices) for name, value in other.nvertices.items(): if name in nvertices.keys() and nvertices[name] != value: - raise ValueError("Trying to concatenate two 
Parcels with inconsistent " + raise ValueError("Trying to concatenate two ParcelsAxis with inconsistent " "number of vertices for %s" % name) nvertices[name] = value @@ -998,7 +999,7 @@ def get_element(self, index): return self.name[index], self.voxels[index], self.vertices[index] -class Scalar(Axis): +class ScalarAxis(Axis): """ Along this axis of the CIFTI-2 vector/matrix each row/column has been given a unique name and optionally metadata @@ -1021,7 +1022,7 @@ def __init__(self, name, meta=None): for check_name in ('name', 'meta'): if getattr(self, check_name).shape != (self.size, ): - raise ValueError("Input {} has incorrect shape ({}) for Scalar axis".format( + raise ValueError("Input {} has incorrect shape ({}) for ScalarAxis axis".format( check_name, getattr(self, check_name).shape)) @classmethod @@ -1035,7 +1036,7 @@ def from_index_mapping(cls, mim): Returns ------- - Scalar + ScalarAxis """ names = [nm.map_name for nm in mim.named_maps] meta = [{} if nm.metadata is None else dict(nm.metadata) for nm in mim.named_maps] @@ -1070,14 +1071,14 @@ def __eq__(self, other): Parameters ---------- - other : Scalar + other : ScalarAxis scalar axis to be compared Returns ------- bool : False if type, length or content do not match """ - if not isinstance(other, Scalar) or self.size != other.size: + if not isinstance(other, ScalarAxis) or self.size != other.size: return False return np.array_equal(self.name, other.name) and np.array_equal(self.meta, other.meta) @@ -1087,16 +1088,16 @@ def __add__(self, other): Parameters ---------- - other : Scalar + other : ScalarAxis scalar axis to be appended to the current one Returns ------- - Scalar + ScalarAxis """ - if not isinstance(other, Scalar): + if not isinstance(other, ScalarAxis): return NotImplemented - return Scalar( + return ScalarAxis( np.append(self.name, other.name), np.append(self.meta, other.meta), ) @@ -1124,7 +1125,7 @@ def get_element(self, index): return self.name[index], self.meta[index] -class Label(Axis): +class LabelAxis(Axis): """ Defines CIFTI-2 axis for label array. 
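The ``ScalarAxis`` arithmetic in the hunks above can be summarised with a short sketch (written against the renamed classes from this patch; the assertions restate the behaviour of ``__add__``, element access and slicing as exercised in the tests further below):

    from nibabel.cifti2 import cifti2_axes

    sc1 = cifti2_axes.ScalarAxis(['curvature', 'thickness'])
    sc2 = cifti2_axes.ScalarAxis(['myelin'])

    combined = sc1 + sc2                  # np.append of names and metadata
    assert len(combined) == 3
    assert combined[2] == ('myelin', {})  # element access returns (name, meta)
    assert combined[:len(sc1)] == sc1     # slicing recovers the first axis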
@@ -1155,7 +1156,7 @@ def __init__(self, name, label, meta=None): for check_name in ('name', 'meta', 'label'): if getattr(self, check_name).shape != (self.size, ): - raise ValueError("Input {} has incorrect shape ({}) for Label axis".format( + raise ValueError("Input {} has incorrect shape ({}) for LabelAxis axis".format( check_name, getattr(self, check_name).shape)) @classmethod @@ -1169,12 +1170,12 @@ def from_index_mapping(cls, mim): Returns ------- - Label + LabelAxis """ tables = [{key: (value.label, value.rgba) for key, value in nm.label_table.items()} for nm in mim.named_maps] - rest = Scalar.from_index_mapping(mim) - return Label(rest.name, tables, rest.meta) + rest = ScalarAxis.from_index_mapping(mim) + return LabelAxis(rest.name, tables, rest.meta) def to_mapping(self, dim): """ @@ -1210,14 +1211,14 @@ def __eq__(self, other): Parameters ---------- - other : Label + other : LabelAxis label axis to be compared Returns ------- bool : False if type, length or content do not match """ - if not isinstance(other, Label) or self.size != other.size: + if not isinstance(other, LabelAxis) or self.size != other.size: return False return ( np.array_equal(self.name, other.name) and @@ -1231,16 +1232,16 @@ def __add__(self, other): Parameters ---------- - other : Label + other : LabelAxis label axis to be appended to the current one Returns ------- - Label + LabelAxis """ - if not isinstance(other, Label): + if not isinstance(other, LabelAxis): return NotImplemented - return Label( + return LabelAxis( np.append(self.name, other.name), np.append(self.label, other.label), np.append(self.meta, other.meta), @@ -1270,7 +1271,7 @@ def get_element(self, index): return self.name[index], self.label[index], self.meta[index] -class Series(Axis): +class SeriesAxis(Axis): """ Along this axis of the CIFTI-2 vector/matrix the rows/columns increase monotonically in time This Axis describes the time point of each row/column. """ @@ -1280,7 +1281,7 @@ class Series(Axis): def __init__(self, start, step, size, unit="SECOND"): """ - Creates a new Series axis + Creates a new SeriesAxis axis Parameters ---------- @@ -1305,7 +1306,7 @@ def time(self): @classmethod def from_index_mapping(cls, mim): """ - Creates a new Series axis based on a CIFTI-2 dataset + Creates a new SeriesAxis axis based on a CIFTI-2 dataset Parameters ---------- @@ -1313,7 +1314,7 @@ def from_index_mapping(cls, mim): Returns ------- - Series + SeriesAxis """ start = mim.series_start * 10 ** mim.series_exponent step = mim.series_step * 10 ** mim.series_exponent @@ -1321,7 +1322,7 @@ def from_index_mapping(cls, mim): def to_mapping(self, dim): """ - Converts the Series to a MatrixIndicesMap for storage in CIFTI-2 format + Converts the SeriesAxis to a MatrixIndicesMap for storage in CIFTI-2 format Parameters ---------- @@ -1349,7 +1350,7 @@ def unit(self): @unit.setter def unit(self, value): if value.upper() not in ("SECOND", "HERTZ", "METER", "RADIAN"): - raise ValueError("Series unit should be one of " + + raise ValueError("SeriesAxis unit should be one of " + "('second', 'hertz', 'meter', or 'radian')") self._unit = value.upper() @@ -1361,7 +1362,7 @@ def __eq__(self, other): True if start, step, size, and unit are the same.
""" return ( - isinstance(other, Series) and + isinstance(other, SeriesAxis) and self.start == other.start and self.step == other.step and self.size == other.size and @@ -1370,30 +1371,30 @@ def __eq__(self, other): def __add__(self, other): """ - Concatenates two Series + Concatenates two SeriesAxis Parameters ---------- - other : Series - Time Series to append at the end of the current time Series. - Note that the starting time of the other time Series is ignored. + other : SeriesAxis + Time SeriesAxis to append at the end of the current time SeriesAxis. + Note that the starting time of the other time SeriesAxis is ignored. Returns ------- - Series - New time Series with the concatenation of the two + SeriesAxis + New time SeriesAxis with the concatenation of the two Raises ------ ValueError - raised if the repetition time of the two time Series is different + raised if the repetition time of the two time SeriesAxis is different """ - if isinstance(other, Series): + if isinstance(other, SeriesAxis): if other.step != self.step: - raise ValueError('Can only concatenate Series with the same step size') + raise ValueError('Can only concatenate SeriesAxis with the same step size') if other.unit != self.unit: - raise ValueError('Can only concatenate Series with the same unit') - return Series(self.start, self.step, self.size + other.size, self.unit) + raise ValueError('Can only concatenate SeriesAxis with the same unit') + return SeriesAxis(self.start, self.step, self.size + other.size, self.unit) return NotImplemented def __getitem__(self, item): @@ -1412,11 +1413,11 @@ def __getitem__(self, item): nelements = (idx_end - idx_start) // step if nelements < 0: nelements = 0 - return Series(idx_start * self.step + self.start, self.step * step, - nelements, self.unit) + return SeriesAxis(idx_start * self.step + self.start, self.step * step, + nelements, self.unit) elif isinstance(item, integer_types): return self.get_element(item) - raise IndexError('Series can only be indexed with integers or slices ' + raise IndexError('SeriesAxis can only be indexed with integers or slices ' 'without breaking the regular structure') def get_element(self, index): @@ -1436,6 +1437,6 @@ def get_element(self, index): if index < 0: index = self.size + index if index >= self.size or index < 0: - raise IndexError("index %i is out of range for Series with size %i" % + raise IndexError("index %i is out of range for SeriesAxis with size %i" % (original_index, self.size)) return self.start + self.step * index diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index cd7682408d..56457187a2 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -12,26 +12,26 @@ def get_brain_models(): """ - Generates a set of practice BrainModel axes + Generates a set of practice BrainModelAxis axes Yields ------ - BrainModel axis + BrainModelAxis axis """ mask = np.zeros(vol_shape) mask[0, 1, 2] = 1 mask[0, 4, 2] = True mask[0, 4, 0] = True - yield axes.BrainModel.from_mask(mask, 'ThalamusRight', rand_affine) + yield axes.BrainModelAxis.from_mask(mask, 'ThalamusRight', rand_affine) mask[0, 0, 0] = True - yield axes.BrainModel.from_mask(mask, affine=rand_affine) + yield axes.BrainModelAxis.from_mask(mask, affine=rand_affine) - yield axes.BrainModel.from_surface([0, 5, 10], 15, 'CortexLeft') - yield axes.BrainModel.from_surface([0, 5, 10, 13], 15) + yield axes.BrainModelAxis.from_surface([0, 5, 10], 15, 'CortexLeft') + yield axes.BrainModelAxis.from_surface([0, 5, 10, 13], 15) 
surface_mask = np.zeros(15, dtype='bool') surface_mask[[2, 9, 14]] = True - yield axes.BrainModel.from_mask(surface_mask, name='CortexRight') + yield axes.BrainModelAxis.from_mask(surface_mask, name='CortexRight') def get_parcels(): @@ -43,43 +43,43 @@ def get_parcels(): Parcel axis """ bml = list(get_brain_models()) - return axes.Parcels.from_brain_models([('mixed', bml[0] + bml[2]), ('volume', bml[1]), ('surface', bml[3])]) + return axes.ParcelsAxis.from_brain_models([('mixed', bml[0] + bml[2]), ('volume', bml[1]), ('surface', bml[3])]) def get_scalar(): """ - Generates a practice Scalar axis with names ('one', 'two', 'three') + Generates a practice ScalarAxis axis with names ('one', 'two', 'three') Returns ------- - Scalar axis + ScalarAxis axis """ - return axes.Scalar(['one', 'two', 'three']) + return axes.ScalarAxis(['one', 'two', 'three']) def get_label(): """ - Generates a practice Label axis with names ('one', 'two', 'three') and two labels + Generates a practice LabelAxis axis with names ('one', 'two', 'three') and two labels Returns ------- - Label axis + LabelAxis axis """ - return axes.Label(['one', 'two', 'three'], use_label) + return axes.LabelAxis(['one', 'two', 'three'], use_label) def get_series(): """ - Generates a set of 4 practice Series axes with different starting times/lengths/time steps and units + Generates a set of 4 practice SeriesAxis axes with different starting times/lengths/time steps and units Yields ------ - Series axis + SeriesAxis axis """ - yield axes.Series(3, 10, 4) - yield axes.Series(8, 10, 3) - yield axes.Series(3, 2, 4) - yield axes.Series(5, 10, 5, "HERTZ") + yield axes.SeriesAxis(3, 10, 4) + yield axes.SeriesAxis(8, 10, 3) + yield axes.SeriesAxis(3, 2, 4) + yield axes.SeriesAxis(5, 10, 5, "HERTZ") def get_axes(): @@ -101,7 +101,7 @@ def get_axes(): def test_brain_models(): """ - Tests the introspection and creation of CIFTI-2 BrainModel axes + Tests the introspection and creation of CIFTI-2 BrainModelAxis axes """ bml = list(get_brain_models()) assert len(bml[0]) == 3 @@ -109,7 +109,7 @@ def test_brain_models(): assert (bml[0].voxel == [[0, 1, 2], [0, 4, 0], [0, 4, 2]]).all() assert bml[0][1][0] == 'CIFTI_MODEL_TYPE_VOXELS' assert (bml[0][1][1] == [0, 4, 0]).all() - assert bml[0][1][2] == axes.BrainModel.to_cifti_brain_structure_name('thalamus_right') + assert bml[0][1][2] == axes.BrainModelAxis.to_cifti_brain_structure_name('thalamus_right') assert len(bml[1]) == 4 assert (bml[1].vertex == -1).all() assert (bml[1].voxel == [[0, 0, 0], [0, 1, 2], [0, 4, 0], [0, 4, 2]]).all() @@ -131,7 +131,7 @@ def test_brain_models(): structures = list(bm.iter_structures()) assert len(structures) == 1 name = structures[0][0] - assert name == axes.BrainModel.to_cifti_brain_structure_name(label) + assert name == axes.BrainModelAxis.to_cifti_brain_structure_name(label) if is_surface: assert bm.nvertices[name] == 15 else: @@ -172,51 +172,51 @@ def test_brain_models(): bmt['thalamus_left'] # Test the constructor - bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + bm_vox = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) assert np.all(bm_vox.name == ['CIFTI_STRUCTURE_THALAMUS_LEFT'] * 5) assert np.array_equal(bm_vox.vertex, np.full(5, -1)) assert np.array_equal(bm_vox.voxel, np.full((5, 3), 1)) with assert_raises(ValueError): # no volume shape - axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4)) + 
axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4)) with assert_raises(ValueError): # no affine - axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), volume_shape=(2, 3, 4)) + axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), volume_shape=(2, 3, 4)) with assert_raises(ValueError): # incorrect name - axes.BrainModel('random_name', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + axes.BrainModelAxis('random_name', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) with assert_raises(ValueError): # negative voxel indices - axes.BrainModel('thalamus_left', voxel=-np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + axes.BrainModelAxis('thalamus_left', voxel=-np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) with assert_raises(ValueError): # no voxels or vertices - axes.BrainModel('thalamus_left', affine=np.eye(4), volume_shape=(2, 3, 4)) + axes.BrainModelAxis('thalamus_left', affine=np.eye(4), volume_shape=(2, 3, 4)) with assert_raises(ValueError): # incorrect voxel shape - axes.BrainModel('thalamus_left', voxel=np.ones((5, 2), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) + axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 2), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) - bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + bm_vertex = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) assert np.array_equal(bm_vertex.name, ['CIFTI_STRUCTURE_CORTEX_LEFT'] * 5) assert np.array_equal(bm_vertex.vertex, np.full(5, 1)) assert np.array_equal(bm_vertex.voxel, np.full((5, 3), -1)) with assert_raises(ValueError): - axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int)) + axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int)) with assert_raises(ValueError): - axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_right': 20}) + axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_right': 20}) with assert_raises(ValueError): - axes.BrainModel('cortex_left', vertex=-np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + axes.BrainModelAxis('cortex_left', vertex=-np.ones(5, dtype=int), nvertices={'cortex_left': 20}) # test from_mask errors with assert_raises(ValueError): # affine should be 4x4 matrix - axes.BrainModel.from_mask(np.arange(5) > 2, affine=np.ones(5)) + axes.BrainModelAxis.from_mask(np.arange(5) > 2, affine=np.ones(5)) with assert_raises(ValueError): # only 1D or 3D masks accepted - axes.BrainModel.from_mask(np.ones((5, 3))) + axes.BrainModelAxis.from_mask(np.ones((5, 3))) - # tests error in adding together or combining as Parcels - bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), - affine=np.eye(4), volume_shape=(2, 3, 4)) + # tests error in adding together or combining as ParcelsAxis + bm_vox = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(2, 3, 4)) bm_vox + bm_vox assert (bm_vertex + bm_vox)[:bm_vertex.size] == bm_vertex assert (bm_vox + bm_vertex)[:bm_vox.size] == bm_vox @@ -225,33 +225,33 @@ def test_brain_models(): assert np.all(bm_added.affine == bm_vox.affine) assert bm_added.volume_shape == bm_vox.volume_shape - axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_vox)]) + axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_vox)]) with 
assert_raises(Exception): bm_vox + get_label() - bm_other_shape = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), - affine=np.eye(4), volume_shape=(4, 3, 4)) + bm_other_shape = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(4, 3, 4)) with assert_raises(ValueError): bm_vox + bm_other_shape with assert_raises(ValueError): - axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_other_shape)]) - bm_other_affine = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), - affine=np.eye(4) * 2, volume_shape=(2, 3, 4)) + axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_other_shape)]) + bm_other_affine = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4) * 2, volume_shape=(2, 3, 4)) with assert_raises(ValueError): bm_vox + bm_other_affine with assert_raises(ValueError): - axes.Parcels.from_brain_models([('a', bm_vox), ('b', bm_other_affine)]) + axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_other_affine)]) - bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) - bm_other_number = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 30}) + bm_vertex = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + bm_other_number = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 30}) with assert_raises(ValueError): bm_vertex + bm_other_number with assert_raises(ValueError): - axes.Parcels.from_brain_models([('a', bm_vertex), ('b', bm_other_number)]) + axes.ParcelsAxis.from_brain_models([('a', bm_vertex), ('b', bm_other_number)]) # test equalities - bm_vox = axes.BrainModel('thalamus_left', voxel=np.ones((5, 3), dtype=int), - affine=np.eye(4), volume_shape=(2, 3, 4)) + bm_vox = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), + affine=np.eye(4), volume_shape=(2, 3, 4)) bm_other = deepcopy(bm_vox) assert bm_vox == bm_other bm_other.voxel[1, 0] = 0 @@ -259,7 +259,7 @@ def test_brain_models(): bm_other = deepcopy(bm_vox) bm_other.vertex[1] = 10 - assert bm_vox == bm_other, 'vertices are ignored in volumetric BrainModel' + assert bm_vox == bm_other, 'vertices are ignored in volumetric BrainModelAxis' bm_other = deepcopy(bm_vox) bm_other.name[1] = 'BRAIN_STRUCTURE_OTHER' @@ -278,11 +278,11 @@ def test_brain_models(): bm_other.volume_shape = (10, 3, 4) assert bm_vox != bm_other - bm_vertex = axes.BrainModel('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) + bm_vertex = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) bm_other = deepcopy(bm_vertex) assert bm_vertex == bm_other bm_other.voxel[1, 0] = 0 - assert bm_vertex == bm_other, 'voxels are ignored in surface BrainModel' + assert bm_vertex == bm_other, 'voxels are ignored in surface BrainModelAxis' bm_other = deepcopy(bm_vertex) bm_other.vertex[1] = 10 @@ -309,7 +309,7 @@ def test_parcels(): Test the introspection and creation of CIFTI-2 Parcel axes """ prc = get_parcels() - assert isinstance(prc, axes.Parcels) + assert isinstance(prc, axes.ParcelsAxis) assert prc[0] == ('mixed', ) + prc['mixed'] assert prc['mixed'][0].shape == (3, 3) assert len(prc['mixed'][1]) == 1 @@ -426,7 +426,7 @@ def test_parcels(): assert prc != prc_other # test direct initialisation - axes.Parcels( + axes.ParcelsAxis( voxels=[np.ones((3, 2), dtype=int)], vertices=[{}], 
name=['single_voxel'], @@ -435,7 +435,7 @@ ) with assert_raises(ValueError): - axes.Parcels( + axes.ParcelsAxis( voxels=[np.ones((3, 2), dtype=int)], vertices=[{}], name=[['single_voxel']], # wrong shape name array @@ -446,11 +446,11 @@ def test_scalar(): """ - Test the introspection and creation of CIFTI-2 Scalar axes + Test the introspection and creation of CIFTI-2 ScalarAxis axes """ sc = get_scalar() assert len(sc) == 3 - assert isinstance(sc, axes.Scalar) + assert isinstance(sc, axes.ScalarAxis) assert (sc.name == ['one', 'two', 'three']).all() assert (sc.meta == [{}] * 3).all() assert sc[1] == ('two', {}) @@ -483,22 +483,22 @@ assert sc == sc_other # test constructor - assert axes.Scalar(['scalar_name'], [{}]) == axes.Scalar(['scalar_name']) + assert axes.ScalarAxis(['scalar_name'], [{}]) == axes.ScalarAxis(['scalar_name']) with assert_raises(ValueError): - axes.Scalar([['scalar_name']]) # wrong shape + axes.ScalarAxis([['scalar_name']]) # wrong shape with assert_raises(ValueError): - axes.Scalar(['scalar_name'], [{}, {}]) # wrong size + axes.ScalarAxis(['scalar_name'], [{}, {}]) # wrong size def test_label(): """ - Test the introspection and creation of CIFTI-2 Scalar axes + Test the introspection and creation of CIFTI-2 LabelAxis axes """ lab = get_label() assert len(lab) == 3 - assert isinstance(lab, axes.Label) + assert isinstance(lab, axes.LabelAxis) assert (lab.name == ['one', 'two', 'three']).all() assert (lab.meta == [{}] * 3).all() assert (lab.label == [use_label] * 3).all() @@ -538,18 +538,18 @@ assert lab == other_lab # test constructor - assert axes.Label(['scalar_name'], [{}], [{}]) == axes.Label(['scalar_name'], [{}]) + assert axes.LabelAxis(['scalar_name'], [{}], [{}]) == axes.LabelAxis(['scalar_name'], [{}]) with assert_raises(ValueError): - axes.Label([['scalar_name']], [{}]) # wrong shape + axes.LabelAxis([['scalar_name']], [{}]) # wrong shape with assert_raises(ValueError): - axes.Label(['scalar_name'], [{}, {}]) # wrong size + axes.LabelAxis(['scalar_name'], [{}, {}]) # wrong size def test_series(): """ - Test the introspection and creation of CIFTI-2 Series axes + Test the introspection and creation of CIFTI-2 SeriesAxis axes """ sr = list(get_series()) assert sr[0].unit == 'SECOND' @@ -635,7 +635,7 @@ test_common_interface(): concatenated = axis1 + axis2 assert axis1 != concatenated assert axis1 == concatenated[:axis1.size] - if isinstance(axis1, axes.Series): + if isinstance(axis1, axes.SeriesAxis): assert axis2 != concatenated[axis1.size:] else: assert axis2 == concatenated[axis1.size:] diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index e1025710ff..4089395b78 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -22,9 +22,9 @@ def check_hcp_grayordinates(brain_model): - """Checks that a BrainModel matches the expected 32k HCP grayordinates + """Checks that a BrainModelAxis matches the expected 32k HCP grayordinates """ - assert isinstance(brain_model, cifti2_axes.BrainModel) + assert isinstance(brain_model, cifti2_axes.BrainModelAxis) structures = list(brain_model.iter_structures()) assert len(structures) == len(hcp_labels) idx_start = 0 @@ -40,7 +40,7 @@ def check_hcp_grayordinates(brain_model): assert (bm.vertex == -1).all() assert (bm.affine == hcp_affine).all() assert bm.volume_shape == (91, 109, 91) - assert name ==
cifti2_axes.BrainModel.to_cifti_brain_structure_name(label) + assert name == cifti2_axes.BrainModelAxis.to_cifti_brain_structure_name(label) assert len(bm) == nel assert (bm.name == brain_model.name[idx_start:idx_start + nel]).all() assert (bm.voxel == brain_model.voxel[idx_start:idx_start + nel]).all() @@ -60,9 +60,9 @@ def check_hcp_grayordinates(brain_model): def check_Conte69(brain_model): - """Checks that the BrainModel matches the expected Conte69 surface coordinates + """Checks that the BrainModelAxis matches the expected Conte69 surface coordinates """ - assert isinstance(brain_model, cifti2_axes.BrainModel) + assert isinstance(brain_model, cifti2_axes.BrainModelAxis) structures = list(brain_model.iter_structures()) assert len(structures) == 2 assert structures[0][0] == 'CIFTI_STRUCTURE_CORTEX_LEFT' @@ -106,7 +106,7 @@ def test_read_ones(): arr = img.get_data() axes = [img.header.get_axis(dim) for dim in range(2)] assert (arr == 1).all() - assert isinstance(axes[0], cifti2_axes.Scalar) + assert isinstance(axes[0], cifti2_axes.ScalarAxis) assert len(axes[0]) == 1 assert axes[0].name[0] == 'ones' assert axes[0].meta[0] == {} @@ -120,7 +120,7 @@ def test_read_conte69_dscalar(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dscalar.nii')) arr = img.get_data() axes = [img.header.get_axis(dim) for dim in range(2)] - assert isinstance(axes[0], cifti2_axes.Scalar) + assert isinstance(axes[0], cifti2_axes.ScalarAxis) assert len(axes[0]) == 2 assert axes[0].name[0] == 'MyelinMap_BC_decurv' assert axes[0].name[1] == 'corrThickness' @@ -134,7 +134,7 @@ def test_read_conte69_dtseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dtseries.nii')) arr = img.get_data() axes = [img.header.get_axis(dim) for dim in range(2)] - assert isinstance(axes[0], cifti2_axes.Series) + assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 assert axes[0].start == 0 assert axes[0].step == 1 @@ -149,7 +149,7 @@ def test_read_conte69_dlabel(): img = nib.load(os.path.join(test_directory, 'Conte69.parcellations_VGD11b.32k_fs_LR.dlabel.nii')) arr = img.get_data() axes = [img.header.get_axis(dim) for dim in range(2)] - assert isinstance(axes[0], cifti2_axes.Label) + assert isinstance(axes[0], cifti2_axes.LabelAxis) assert len(axes[0]) == 3 assert (axes[0].name == ['Composite Parcellation-lh (FRB08_OFP03_retinotopic)', 'Brodmann lh (from colin.R via pals_R-to-fs_LR)', 'MEDIAL WALL lh (fs_LR)']).all() @@ -164,7 +164,7 @@ def test_read_conte69_ptseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.ptseries.nii')) arr = img.get_data() axes = [img.header.get_axis(dim) for dim in range(2)] - assert isinstance(axes[0], cifti2_axes.Series) + assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 assert axes[0].start == 0 assert axes[0].step == 1 diff --git a/nibabel/cifti2/tests/test_name.py b/nibabel/cifti2/tests/test_name.py index b656f88875..6b53d46523 100644 --- a/nibabel/cifti2/tests/test_name.py +++ b/nibabel/cifti2/tests/test_name.py @@ -12,7 +12,7 @@ def test_name_conversion(): """ Tests the automatic name conversion to a format recognized by CIFTI-2 """ - func = cifti2_axes.BrainModel.to_cifti_brain_structure_name + func = cifti2_axes.BrainModelAxis.to_cifti_brain_structure_name for base_name, input_names in equivalents: assert base_name == func(base_name) for name in input_names: From cffb8c0fb764ac6b94d6582c5e87bf295c65b296 Mon Sep 17 00:00:00 2001 
From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:02:18 +0000 Subject: [PATCH 063/689] DOC: test creation of `bm_thal` and `bm_cortex` --- nibabel/cifti2/cifti2_axes.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 90d3c6f4c4..eb8a628658 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -50,10 +50,14 @@ We can create brain models covering the left cortex and left thalamus using: >>> from nibabel import cifti2 ->>> bm_cortex = cifti2.BrainModelAxis.from_mask(cortex_mask, -... brain_structure='cortex_left') # doctest: +SKIP ->>> bm_thal = cifti2.BrainModelAxis.from_mask(thalamus_mask, affine=affine, -... brain_structure='thalamus_left') # doctest: +SKIP +>>> import numpy as np +>>> bm_cortex = cifti2.BrainModelAxis.from_mask([True, False, True, True], +... name='cortex_left') +>>> bm_thal = cifti2.BrainModelAxis.from_mask(np.ones((2, 2, 2)), affine=np.eye(4), +... name='thalamus_left') + +In this very simple case ``bm_cortex`` describes a left cortical surface skipping the second +out of four vertices. ``bm_thal`` contains all voxels in a 2x2x2 volume. Brain structure names automatically get converted to valid CIFTI-2 indentifiers using :meth:`BrainModelAxis.to_cifti_brain_structure_name`. @@ -332,6 +336,8 @@ def from_mask(cls, mask, name='other', affine=None): affine = np.asanyarray(affine) if affine.shape != (4, 4): raise ValueError("Affine transformation should be a 4x4 array or None, not %r" % affine) + + mask = np.asanyarray(mask) if mask.ndim == 1: return cls.from_surface(np.where(mask != 0)[0], mask.size, name=name) elif mask.ndim == 3: From 9b2276db70d5550324836e68550c527c2da31bc6 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:03:46 +0000 Subject: [PATCH 064/689] DOC: got rid of most of the :class:`Axis` in tutorial --- nibabel/cifti2/cifti2_axes.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index eb8a628658..aa9acb48a9 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -10,22 +10,22 @@ * :class:`LabelAxis`: each row/column has a unique name and label table (with optional meta-data) * :class:`SeriesAxis`: each row/column is a timepoint, which increases monotonically -All of these classes are derived from the :class:`Axis` class. +All of these classes are derived from the Axis class. After loading a CIFTI-2 file a tuple of axes describing the rows and columns can be obtained from the :meth:`.cifti2.Cifti2Header.get_axis` method on the header object (e.g. ``nibabel.load().header.get_axis()``). Inversely, a new -:class:`.cifti2.Cifti2Header` object can be created from existing :class:`Axis` objects +:class:`.cifti2.Cifti2Header` object can be created from existing Axis objects using the :meth:`.cifti2.Cifti2Header.from_axes` factory method. -CIFTI-2 :class:`Axis` objects of the same type can be concatenated using the '+'-operator. +CIFTI-2 Axis objects of the same type can be concatenated using the '+'-operator. Numpy indexing also works on axes (except for SeriesAxis objects, which have to remain monotonically increasing or decreasing). Creating new CIFTI-2 axes ----------------------- -New :class:`Axis` objects can be constructed by providing a description for what is contained -in each row/column of the described tensor. 
For each :class:`Axis` sub-class this descriptor is: +New Axis objects can be constructed by providing a description for what is contained +in each row/column of the described tensor. For each Axis sub-class this descriptor is: * :class:`BrainModelAxis`: a CIFTI-2 structure name and a voxel or vertex index * :class:`ParcelsAxis`: a name and a sequence of voxel and vertex indices From 110334bdaa8e4211e46c4819a8b48e722f85e1c5 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 27 Mar 2019 11:05:06 +0000 Subject: [PATCH 065/689] Update nibabel/cifti2/cifti2.py Co-Authored-By: MichielCottaar --- nibabel/cifti2/cifti2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 7ca5584bb1..66ac855cf0 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1276,7 +1276,7 @@ def get_axis(self, index): Returns ------- - axis : cifti2_axes.Axis + axis : :class:`.cifti2_axes.Axis` ''' from . import cifti2_axes return cifti2_axes.from_index_mapping(self.matrix.get_index_map(index)) From 063047fa9806a76fbdb1ec8a7347d4e849ef39c6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 27 Mar 2019 11:08:20 +0000 Subject: [PATCH 066/689] Apply suggestions from code review Co-Authored-By: MichielCottaar --- nibabel/cifti2/cifti2.py | 4 ++-- nibabel/cifti2/cifti2_axes.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 66ac855cf0..8a4f12e767 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1288,7 +1288,7 @@ def from_axes(cls, axes): Parameters ---------- - axes : Tuple[cifti2_axes.Axis] + axes : tuple of :class`.cifti2_axes.Axis` sequence of Cifti2 axes describing each row/column of the matrix to be stored Returns @@ -1328,7 +1328,7 @@ def __init__(self, Object containing image data. It should be some object that returns an array from ``np.asanyarray``. It should have a ``shape`` attribute or property. - header : Cifti2Header instance or Sequence[cifti2_axes.Axis] + header : Cifti2Header instance or sequence of :class:`cifti2_axes.Axis` Header with data for / from XML part of CIFTI-2 format. Alternatively a sequence of cifti2_axes.Axis objects can be provided describing each dimension of the array. 
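[Illustration, not part of the patch: the docstrings above describe building a
CIFTI-2 header or image directly from axes. A minimal sketch, assuming the
``cifti2_axes`` API introduced in this series and a made-up 2x3 data array:

>>> import numpy as np
>>> from nibabel import cifti2
>>> scalars = cifti2.ScalarAxis(['curvature', 'thickness'])
>>> bm_cortex = cifti2.BrainModelAxis.from_mask([True, False, True, True],
...                                             name='cortex_left')
>>> hdr = cifti2.Cifti2Header.from_axes((scalars, bm_cortex))
>>> data = np.zeros((len(scalars), len(bm_cortex)), dtype=np.float32)
>>> img = cifti2.Cifti2Image(data, header=(scalars, bm_cortex))

Passing the axes tuple directly as ``header`` is equivalent to calling
``Cifti2Header.from_axes`` on it first, as documented above.]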
diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index aa9acb48a9..e9f56ae38f 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -124,11 +124,11 @@ def from_index_mapping(mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`.cifti2.Cifti2MatrixIndicesMap` Returns ------- - subtype of Axis + subclass of :class:`Axis` """ return_type = {'CIFTI_INDEX_TYPE_SCALARS': ScalarAxis, 'CIFTI_INDEX_TYPE_LABELS': LabelAxis, @@ -149,7 +149,7 @@ def to_header(axes): Returns ------- - cifti2.Cifti2Header + :class:`.cifti2.Cifti2Header` """ axes = tuple(axes) mims_all = [] @@ -777,7 +777,7 @@ def from_brain_models(cls, named_brain_models): for idx_parcel, (parcel_name, bm) in enumerate(named_brain_models): all_names.append(parcel_name) - voxels = bm.voxel[~bm.surface_mask] + voxels = bm.voxel[bm.volume_mask] if voxels.shape[0] != 0: if affine is None: affine = bm.affine From cefb8c6cf7c65c1a8fa077fec4821df6698070d8 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:10:57 +0000 Subject: [PATCH 067/689] RF: replace ~surface_mask with volume_mask --- nibabel/cifti2/cifti2_axes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e9f56ae38f..df7269ce95 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -604,7 +604,7 @@ def __eq__(self, other): self.volume_shape == other.volume_shape) and self.nvertices == other.nvertices and np.array_equal(self.name, other.name) and - np.array_equal(self.voxel[~self.surface_mask], other.voxel[~other.surface_mask]) and + np.array_equal(self.voxel[self.volume_mask], other.voxel[self.volume_mask]) and np.array_equal(self.vertex[self.surface_mask], other.vertex[other.surface_mask]) ) From 0f0e1f746d9c5e2caaebd3e9c00c464e9a0b31af Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:17:22 +0000 Subject: [PATCH 068/689] DOC: added list of concrete classes to Axis object --- nibabel/cifti2/cifti2_axes.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index df7269ce95..00999dfe66 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -172,6 +172,14 @@ class Axis(object): Abstract class for any object describing the rows or columns of a CIFTI-2 vector/matrix Mainly used for type checking. 
+ + Base class for the following concrete CIFTI-2 axes: + + * :class:`BrainModelAxis`: each row/column is a voxel or vertex + * :class:`ParcelsAxis`: each row/column is a group of voxels and/or vertices + * :class:`ScalarAxis`: each row/column has a unique name (with optional meta-data) + * :class:`LabelAxis`: each row/column has a unique name and label table (with optional meta-data) + * :class:`SeriesAxis`: each row/column is a timepoint, which increases monotonically """ @property From 0270ad9f8e2bd0e469f5fea6b914fcdca80e5eaa Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:31:17 +0000 Subject: [PATCH 069/689] BF: add name to return, so that link works in html --- nibabel/cifti2/cifti2_axes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 00999dfe66..77cf01427f 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -128,7 +128,7 @@ def from_index_mapping(mim): Returns ------- - subclass of :class:`Axis` + axis : subclass of :class:`Axis` """ return_type = {'CIFTI_INDEX_TYPE_SCALARS': ScalarAxis, 'CIFTI_INDEX_TYPE_LABELS': LabelAxis, @@ -149,7 +149,7 @@ def to_header(axes): Returns ------- - :class:`.cifti2.Cifti2Header` + header : :class:`.cifti2.Cifti2Header` """ axes = tuple(axes) mims_all = [] From ed764182d78c94824849b2b38bd874f1c83624f0 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:32:17 +0000 Subject: [PATCH 070/689] RF: fix line length --- nibabel/cifti2/cifti2_axes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 77cf01427f..ddcc59ad4f 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -178,7 +178,7 @@ class Axis(object): * :class:`BrainModelAxis`: each row/column is a voxel or vertex * :class:`ParcelsAxis`: each row/column is a group of voxels and/or vertices * :class:`ScalarAxis`: each row/column has a unique name (with optional meta-data) - * :class:`LabelAxis`: each row/column has a unique name and label table (with optional meta-data) + * :class:`LabelAxis`: each row/column has a unique name and label table with optional meta-data * :class:`SeriesAxis`: each row/column is a timepoint, which increases monotonically """ From d825a3c30cac875617c4e9cfa1f779b8ed7ca934 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Wed, 27 Mar 2019 11:33:00 +0000 Subject: [PATCH 071/689] DOC: make format in list of axes more consistent --- nibabel/cifti2/cifti2_axes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index ddcc59ad4f..e42aa42a1b 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -177,7 +177,7 @@ class Axis(object): * :class:`BrainModelAxis`: each row/column is a voxel or vertex * :class:`ParcelsAxis`: each row/column is a group of voxels and/or vertices - * :class:`ScalarAxis`: each row/column has a unique name (with optional meta-data) + * :class:`ScalarAxis`: each row/column has a unique name with optional meta-data * :class:`LabelAxis`: each row/column has a unique name and label table with optional meta-data * :class:`SeriesAxis`: each row/column is a timepoint, which increases monotonically """ From 1bc459e2c42405088e07e7514e47bcd629599517 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 27 Mar 2019 13:02:00 -0400 Subject: [PATCH 072/689] DOCTEST: Drop doctest SKIP directives --- nibabel/cifti2/cifti2_axes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index e42aa42a1b..2cd92ba8a0 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -67,7 +67,7 @@ These can be concatenated in a single brain model covering the left cortex and thalamus by simply adding them together ->>> bm_full = bm_cortex + bm_thal # doctest: +SKIP +>>> bm_full = bm_cortex + bm_thal Brain models covering the full HCP grayordinate space can be constructed by adding all the volumetric and surface brain models together like this (or by reading one from an already @@ -75,12 +75,12 @@ Getting a specific brain region from the full brain model is as simple as: ->>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_CORTEX_LEFT'] == bm_cortex # doctest: +SKIP ->>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_THALAMUS_LEFT'] == bm_thal # doctest: +SKIP +>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_CORTEX_LEFT'] == bm_cortex +>>> assert bm_full[bm_full.name == 'CIFTI_STRUCTURE_THALAMUS_LEFT'] == bm_thal You can also iterate over all brain structures in a brain model: ->>> for name, slc, bm in bm_full.iter_structures(): ... # doctest: +SKIP +>>> for name, slc, bm in bm_full.iter_structures(): ... In this case there will be two iterations, namely: ('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, ), bm_cortex) @@ -93,23 +93,23 @@ ... ('surface_parcel', bm_cortex[:100]), # contains first 100 cortical vertices ... ('volume_parcel', bm_thal), # contains thalamus ... ('combined_parcel', bm_full[[1, 8, 10, 120, 127]) # contains selected voxels/vertices -... ]) # doctest: +SKIP +... ]) Time series are represented by their starting time (typically 0), step size (i.e. sampling time or TR), and number of elements: ->>> series = cifti2.SeriesAxis(start=0, step=100, size=5000) # doctest: +SKIP +>>> series = cifti2.SeriesAxis(start=0, step=100, size=5000) So a header for fMRI data with a TR of 100 ms covering the left cortex and thalamus with 5000 timepoints could be created with ->>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal)) # doctest: +SKIP +>>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal)) Similarly the curvature and cortical thickness on the left cortex could be stored using a header like: >>> cifti2.Cifti2Header.from_axes((cifti.ScalarAxis(['curvature', 'thickness'], -... bm_cortex)) # doctest: +SKIP +... bm_cortex)) """ import numpy as np from . import cifti2 From 04a4b45623caabf9617f4de2743d4ee67d249b85 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 27 Mar 2019 13:18:20 -0400 Subject: [PATCH 073/689] FIX: Use other.volume_mask to index other.voxel --- nibabel/cifti2/cifti2_axes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 2cd92ba8a0..fe27c16035 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -612,7 +612,7 @@ def __eq__(self, other): self.volume_shape == other.volume_shape) and self.nvertices == other.nvertices and np.array_equal(self.name, other.name) and - np.array_equal(self.voxel[self.volume_mask], other.voxel[self.volume_mask]) and + np.array_equal(self.voxel[self.volume_mask], other.voxel[other.volume_mask]) and np.array_equal(self.vertex[self.surface_mask], other.vertex[other.surface_mask]) ) From 4aa360996a36a2bcba312be86ccb034a18b44400 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 27 Mar 2019 13:31:29 -0400 Subject: [PATCH 074/689] DOC: Update docstrings with a few more links and array_like --- nibabel/cifti2/cifti2_axes.py | 69 ++++++++++++++++++----------------- 1 file changed, 35 insertions(+), 34 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index fe27c16035..6b4757347e 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -252,20 +252,20 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, Parameters ---------- - name : str or np.ndarray + name : array_like brain structure name or (N, ) string array with the brain structure names - voxel : np.ndarray + voxel : array_like, optional (N, 3) array with the voxel indices (can be omitted for CIFTI-2 files only covering the surface) - vertex : np.ndarray + vertex : array_like, optional (N, ) array with the vertex indices (can be omitted for volumetric CIFTI-2 files) - affine : np.ndarray + affine : array_like, optional (4, 4) array mapping voxel indices to mm space (not needed for CIFTI-2 files only covering the surface) - volume_shape : tuple of three integers + volume_shape : tuple of three integers, optional shape of the volume in which the voxels were defined (not needed for CIFTI-2 files only covering the surface) - nvertices : dict from string to integer + nvertices : dict from string to integer, optional maps names of surface elements to integers (not needed for volumetric CIFTI-2 files) """ if voxel is None: @@ -304,7 +304,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, if affine is None or volume_shape is None: raise ValueError("Affine and volume shape should be defined " "for BrainModelAxis containing voxels") - self.affine = affine + self.affine = np.asanyarray(affine) self.volume_shape = volume_shape if np.any(self.vertex[surface_mask] < 0): @@ -325,12 +325,12 @@ def from_mask(cls, mask, name='other', affine=None): Parameters ---------- - mask : np.ndarray + mask : array_like all non-zero voxels will be included in the BrainModelAxis axis should be (Nx, Ny, Nz) array for volume mask or (Nvertex, ) array for surface mask - name : str + name : str, optional Name of the brain structure (e.g. 
'CortexRight', 'thalamus_left' or 'brain_stem') - affine : np.ndarray + affine : array_like, optional (4, 4) array with the voxel to mm transformation (defaults to identity matrix) Argument will be ignored for surface masks @@ -362,7 +362,7 @@ def from_surface(cls, vertices, nvertex, name='Other'): Parameters ---------- - vertices : np.ndarray + vertices : array_like indices of the vertices on the surface nvertex : int total number of vertices on the surface @@ -384,7 +384,7 @@ def from_index_mapping(cls, mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`.cifti2.Cifti2MatrixIndicesMap` Returns ------- @@ -422,7 +422,7 @@ def to_mapping(self, dim): Returns ------- - cifti2.Cifti2MatrixIndicesMap + :class:`.cifti2.Cifti2MatrixIndicesMap` """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_BRAIN_MODELS') for name, to_slice, bm in self.iter_structures(): @@ -720,22 +720,22 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert Parameters ---------- - name : np.ndarray + name : array_like (N, ) string array with the parcel names - voxels : np.ndarray + voxels : array_like (N, ) object array each containing a sequence of voxels. For each parcel the voxels are represented by a (M, 3) index array - vertices : np.ndarray + vertices : array_like (N, ) object array each containing a sequence of vertices. For each parcel the vertices are represented by a mapping from brain structure name to (M, ) index array - affine : np.ndarray + affine : array_like, optional (4, 4) array mapping voxel indices to mm space (not needed for CIFTI-2 files only covering the surface) - volume_shape : tuple of three integers + volume_shape : tuple of three integers, optional shape of the volume in which the voxels were defined (not needed for CIFTI-2 files only covering the surface) - nvertices : dict[String -> int] + nvertices : dict from string to integer, optional maps names of surface elements to integers (not needed for volumetric CIFTI-2 files) """ self.name = np.asanyarray(name, dtype='U') @@ -748,7 +748,7 @@ def __init__(self, name, voxels, vertices, affine=None, volume_shape=None, nvert voxels[idx] = as_array[idx] self.voxels = np.asanyarray(voxels, dtype='object') self.vertices = np.asanyarray(vertices, dtype='object') - self.affine = affine + self.affine = np.asanyarray(affine) if affine is not None else None self.volume_shape = volume_shape if nvertices is None: self.nvertices = {} @@ -813,7 +813,7 @@ def from_index_mapping(cls, mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`cifti2.Cifti2MatrixIndicesMap` Returns ------- @@ -859,7 +859,7 @@ def to_mapping(self, dim): Returns ------- - cifti2.Cifti2MatrixIndicesMap + :class:`cifti2.Cifti2MatrixIndicesMap` """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_PARCELS') if self.affine is not None: @@ -1008,7 +1008,8 @@ def get_element(self, index): tuple with 3 elements - unicode name of the parcel - (M, 3) int array with voxel indices - - Dict[String -> (K, ) int array] with vertex indices for a specific surface brain structure + - dict from string to (K, ) int array with vertex indices + for a specific surface brain structure """ return self.name[index], self.voxels[index], self.vertices[index] @@ -1023,9 +1024,9 @@ def __init__(self, name, meta=None): """ Parameters ---------- - name : np.ndarray of string + name : array_like (N, ) string array with the parcel names - meta : np.ndarray of dict + meta : array_like (N, ) object array with a 
dictionary of metadata for each row/column. Defaults to empty dictionary """ @@ -1046,7 +1047,7 @@ def from_index_mapping(cls, mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`.cifti2.Cifti2MatrixIndicesMap` Returns ------- @@ -1067,7 +1068,7 @@ def to_mapping(self, dim): Returns ------- - cifti2.Cifti2MatrixIndicesMap + :class:`.cifti2.Cifti2MatrixIndicesMap` """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_SCALARS') for name, meta in zip(self.name, self.meta): @@ -1151,13 +1152,13 @@ def __init__(self, name, label, meta=None): """ Parameters ---------- - name : np.ndarray + name : array_like (N, ) string array with the parcel names - label : np.ndarray + label : array_like single dictionary or (N, ) object array with dictionaries mapping from integers to (name, (R, G, B, A)), where name is a string and R, G, B, and A are floats between 0 and 1 giving the colour and alpha (i.e., transparency) - meta : np.ndarray + meta : array_like, optional (N, ) object array with a dictionary of metadata for each row/column """ self.name = np.asanyarray(name, dtype='U') @@ -1180,7 +1181,7 @@ def from_index_mapping(cls, mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`.cifti2.Cifti2MatrixIndicesMap` Returns ------- @@ -1202,7 +1203,7 @@ def to_mapping(self, dim): Returns ------- - cifti2.Cifti2MatrixIndicesMap + :class:`.cifti2.Cifti2MatrixIndicesMap` """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_LABELS') for name, label, meta in zip(self.name, self.label, self.meta): @@ -1324,7 +1325,7 @@ def from_index_mapping(cls, mim): Parameters ---------- - mim : cifti2.Cifti2MatrixIndicesMap + mim : :class:`.cifti2.Cifti2MatrixIndicesMap` Returns ------- @@ -1345,7 +1346,7 @@ def to_mapping(self, dim): Returns ------- - cifti2.Cifti2MatrixIndicesMap + :class:`cifti2.Cifti2MatrixIndicesMap` """ mim = cifti2.Cifti2MatrixIndicesMap([dim], 'CIFTI_INDEX_TYPE_SERIES') mim.series_exponent = 0 From 36c162d1f02043de251044d01007ea8780c39f02 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 28 Mar 2019 11:48:36 +0000 Subject: [PATCH 075/689] BF: doctest fixes for tutorial --- nibabel/cifti2/cifti2_axes.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 6b4757347e..3b0e9519f4 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -80,7 +80,12 @@ You can also iterate over all brain structures in a brain model: ->>> for name, slc, bm in bm_full.iter_structures(): ... +>>> for idx, (name, slc, bm) in enumerate(bm_full.iter_structures()): +... print(name, slc) +... assert bm == bm_full[slc] +... assert bm == bm_cortex if idx == 0 else bm_thal +CIFTI_STRUCTURE_CORTEX_LEFT slice(0, 3, None) +CIFTI_STRUCTURE_THALAMUS_LEFT slice(3, None, None) In this case there will be two iterations, namely: ('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, ), bm_cortex) @@ -90,9 +95,9 @@ ParcelsAxis can be constructed from selections of these brain models: >>> parcel = cifti2.ParcelsAxis.from_brain_models([ -... ('surface_parcel', bm_cortex[:100]), # contains first 100 cortical vertices +... ('surface_parcel', bm_cortex[:2]), # contains first 2 cortical vertices ... ('volume_parcel', bm_thal), # contains thalamus -... ('combined_parcel', bm_full[[1, 8, 10, 120, 127]) # contains selected voxels/vertices +... ('combined_parcel', bm_full[[1, 8, 10]]), # contains selected voxels/vertices ... 
])

 Time series are represented by their starting time (typically 0), step size
@@ -103,13 +108,15 @@
 So a header for fMRI data with a TR of 100 ms covering the left cortex and thalamus with
 5000 timepoints could be created with

->>> cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal))
+>>> type(cifti2.Cifti2Header.from_axes((series, bm_cortex + bm_thal)))
+<class 'nibabel.cifti2.cifti2.Cifti2Header'>

 Similarly the curvature and cortical thickness on the left cortex could be stored using a
 header like:

->>> cifti2.Cifti2Header.from_axes((cifti.ScalarAxis(['curvature', 'thickness'],
-... bm_cortex)) # doctest: +SKIP
+>>> type(cifti2.Cifti2Header.from_axes((cifti2.ScalarAxis(['curvature', 'thickness']),
+... bm_cortex)))
+<class 'nibabel.cifti2.cifti2.Cifti2Header'>
 """
 import numpy as np
 from . import cifti2

From 0927424e6582750c5fb1556cdd87ebff4884b556 Mon Sep 17 00:00:00 2001
From: Michiel Cottaar
Date: Fri, 29 Mar 2019 10:19:08 +0000
Subject: [PATCH 076/689] BF: fixed doctest for python 2.7

---
 nibabel/cifti2/cifti2_axes.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py
index 3b0e9519f4..30decec3d1 100644
--- a/nibabel/cifti2/cifti2_axes.py
+++ b/nibabel/cifti2/cifti2_axes.py
@@ -81,11 +81,11 @@
 You can also iterate over all brain structures in a brain model:

 >>> for idx, (name, slc, bm) in enumerate(bm_full.iter_structures()):
-... print(name, slc)
+... print((str(name), slc))
 ... assert bm == bm_full[slc]
 ... assert bm == bm_cortex if idx == 0 else bm_thal
-CIFTI_STRUCTURE_CORTEX_LEFT slice(0, 3, None)
-CIFTI_STRUCTURE_THALAMUS_LEFT slice(3, None, None)
+('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, 3, None))
+('CIFTI_STRUCTURE_THALAMUS_LEFT', slice(3, None, None))

 In this case there will be two iterations, namely:
 ('CIFTI_STRUCTURE_CORTEX_LEFT', slice(0, ), bm_cortex)

From 1672e994233d9aaa8125f76c50731ec2d3f59e91 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 1 Apr 2019 10:18:01 -0400
Subject: [PATCH 077/689] DOC: Update changelog, zenodo

---
 .zenodo.json | 17 +++++++++++------
 Changelog    |  7 +++----
 2 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/.zenodo.json b/.zenodo.json
index 10d7a54128..e4ea56b108 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -38,6 +38,11 @@
       "name": "Halchenko, Yaroslav O.",
       "orcid": "0000-0003-3456-2493"
     },
+    {
+      "affiliation": "Wellcome Centre for Integrative Neuroimaging, University of Oxford, UK",
+      "name": "Cottaar, Michiel",
+      "orcid": "0000-0003-4679-7724"
+    },
     {
       "affiliation": "MIT, HMS",
       "name": "Ghosh, Satrajit",
@@ -82,11 +87,6 @@
     {
       "name": "Moloney, Brendan"
     },
-    {
-      "affiliation": "Wellcome Centre for Integrative Neuroimaging, University of Oxford, UK",
-      "name": "Cottaar, Michiel",
-      "orcid": "0000-0003-4679-7724"
-    },
     {
       "name": "Burns, Christopher"
     },
@@ -163,7 +163,9 @@
       "name": "Nguyen, Ly"
     },
     {
-      "name": "Reddigari, Samir"
+      "affiliation": "BrainSpec, Boston, MA",
+      "name": "Reddigari, Samir",
+      "orcid": "0000-0003-1472-5881"
     },
     {
       "name": "St-Jean, Samuel"
     },
@@ -210,6 +212,9 @@
       "name": "Jordan, Kesshi",
       "orcid": "0000-0001-6313-0580"
     },
+    {
+      "name": "Cieslak, Matt"
+    },
     {
       "name": "Moreno, Miguel Estevan"
     },
diff --git a/Changelog b/Changelog
index 0aae6b0212..6637d0b995 100644
--- a/Changelog
+++ b/Changelog
@@ -25,11 +25,13 @@ Eric Larson (EL), Demien Wasserman, and Stephan Gerhard.

 References like "pr/298" refer to github pull request numbers.
-2.4.0 (Monday 25 March 2019) +2.4.0 (Monday 1 April 2019) ============================ New features ------------ +* Alternative ``Axis``-based interface for manipulating CIFTI-2 headers + (pr/641) (Michiel Cottaar, reviewed by Demien Wasserman, CM, SG) Enhancements ------------ @@ -62,9 +64,6 @@ Maintenance * Add Zenodo metadata, sorted by commits (pr/732) (CM + others) * Update author listing and copyrights (pr/742) (MB, reviewed by CM) -API changes and deprecations ----------------------------- - 2.3.3 (Wednesday 16 January 2019) ================================= From 53195805c2359f807b790e0b5e6912b0312f024d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 1 Apr 2019 10:23:16 -0400 Subject: [PATCH 078/689] DOC: Add Matt Cieslak to author list --- doc/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/index.rst b/doc/source/index.rst index d63844f712..453224bffb 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -95,6 +95,7 @@ contributed code and discussion (in rough order of appearance): * Soichi Hayashi * Samir Reddigari * Konstantinos Raktivan +* Matt Cieslak License reprise =============== From b58e2044b7dd0f755db7d141f6749dfabb3c5933 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 1 Apr 2019 22:19:00 -0400 Subject: [PATCH 079/689] MNT: Bump version to 2.4.1dev --- nibabel/info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index b5c81c431c..0be9a9c3c9 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -18,9 +18,9 @@ # (pre-release) version. _version_major = 2 _version_minor = 4 -_version_micro = 0 -# _version_extra = 'dev' -_version_extra = '' +_version_micro = 1 +_version_extra = 'dev' +# _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From 6324e180ea89692bdd65b72eb54693ef8ef984ea Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 1 Apr 2019 22:20:00 -0400 Subject: [PATCH 080/689] MNT: Bump version to 2.5.0dev --- nibabel/info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index b5c81c431c..a5b86ba790 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -17,10 +17,10 @@ # We usually use `dev` as `_version_extra` to label this as a development # (pre-release) version. _version_major = 2 -_version_minor = 4 +_version_minor = 5 _version_micro = 0 -# _version_extra = 'dev' -_version_extra = '' +_version_extra = 'dev' +# _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From cda524cf6586de7c55868526a52b1f71b21959be Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 22 Apr 2019 20:46:47 -0400 Subject: [PATCH 081/689] DOC: Update Sphinx config to support recent Sphinx/numpydoc --- doc-requirements.txt | 4 ++-- doc/source/_templates/reggie.html | 1 + doc/source/conf.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 doc/source/_templates/reggie.html diff --git a/doc-requirements.txt b/doc-requirements.txt index 348e6cce9a..59c87a4e6f 100644 --- a/doc-requirements.txt +++ b/doc-requirements.txt @@ -1,7 +1,7 @@ # Requirements for building docs -r requirements.txt -# Sphinx >= 1.6 breaks the math_dollar extension -sphinx<=1.5.6 +sphinx numpydoc texext matplotlib>=1.3 +mock diff --git a/doc/source/_templates/reggie.html b/doc/source/_templates/reggie.html new file mode 100644 index 0000000000..835c2570d3 --- /dev/null +++ b/doc/source/_templates/reggie.html @@ -0,0 +1 @@ +
Reggie -- the one
diff --git a/doc/source/conf.py b/doc/source/conf.py index 9a63ba7e32..d6e14e1a70 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -201,7 +201,7 @@ #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -html_sidebars = {'index': 'indexsidebar.html'} +html_sidebars = {'index': ['localtoc.html', 'relations.html', 'sourcelink.html', 'indexsidebar.html', 'searchbox.html', 'reggie.html']} # Additional templates that should be rendered to pages, maps page names to # template names. From 5edbbe68389bfd844bd251abb043060d8e4d7e52 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 24 Apr 2019 16:50:24 -0400 Subject: [PATCH 082/689] TEST: Verify writing to loaded data array fails --- nibabel/gifti/tests/test_parse_gifti_fast.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 9adc03d8fd..726779d988 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -268,6 +268,13 @@ def test_readwritedata(): assert_array_almost_equal(img.darrays[0].data, img2.darrays[0].data) +def test_modify_darray(): + for fname in (DATA_FILE1, DATA_FILE2, DATA_FILE5): + img = load(fname) + darray = img.darrays[0] + darray.data[:] = 0 + assert_true(np.array_equiv(darray.data, 0)) + def test_write_newmetadata(): img = gi.GiftiImage() From fcba2f916e4e27d2efed4f988aefd5af4bb33d11 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 24 Apr 2019 17:48:01 -0400 Subject: [PATCH 083/689] RF: Decode B64* data to bytearrays for writable numpy arrays --- nibabel/gifti/parse_gifti_fast.py | 48 +++++++++++++++---------------- 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/nibabel/gifti/parse_gifti_fast.py b/nibabel/gifti/parse_gifti_fast.py index de02f4c76b..f27e0725d6 100644 --- a/nibabel/gifti/parse_gifti_fast.py +++ b/nibabel/gifti/parse_gifti_fast.py @@ -33,43 +33,41 @@ class GiftiParseError(ExpatError): def read_data_block(encoding, endian, ordering, datatype, shape, data): """ Tries to unzip, decode, parse the funny string data """ - ord = array_index_order_codes.npcode[ordering] enclabel = gifti_encoding_codes.label[encoding] + dtype = data_type_codes.type[datatype] if enclabel == 'ASCII': # GIFTI_ENCODING_ASCII c = StringIO(data) - da = np.loadtxt(c) - da = da.astype(data_type_codes.type[datatype]) + da = np.loadtxt(c, dtype=dtype) return da # independent of the endianness - elif enclabel == 'B64BIN': - # GIFTI_ENCODING_B64BIN - dec = base64.b64decode(data.encode('ascii')) - dt = data_type_codes.type[datatype] - sh = tuple(shape) - newarr = np.frombuffer(dec, dtype=dt) - if len(newarr.shape) != len(sh): - newarr = newarr.reshape(sh, order=ord) - - elif enclabel == 'B64GZ': - # GIFTI_ENCODING_B64GZ - # convert to bytes array for python 3.2 - # http://www.diveintopython3.net/strings.html#byte-arrays - dec = base64.b64decode(data.encode('ascii')) - zdec = zlib.decompress(dec) - dt = data_type_codes.type[datatype] - sh = tuple(shape) - newarr = np.frombuffer(zdec, dtype=dt) - if len(newarr.shape) != len(sh): - newarr = newarr.reshape(sh, order=ord) - elif enclabel == 'External': # GIFTI_ENCODING_EXTBIN raise NotImplementedError("In what format are the external files?") - else: + elif enclabel not in ('B64BIN', 'B64GZ'): return 0 + # Numpy arrays created from bytes objects are read-only. 
+ # Neither b64decode nor decompress will return bytearrays, and there + # are not equivalents to fobj.readinto to allow us to pass them, so + # there is not a simple way to avoid making copies. + # If this becomes a problem, we should write a decoding interface with + # a tunable chunk size. + dec = base64.b64decode(data.encode('ascii')) + if enclabel == 'B64BIN': + # GIFTI_ENCODING_B64BIN + buff = bytearray(dec) + else: + # GIFTI_ENCODING_B64GZ + buff = bytearray(zlib.decompress(dec)) + del dec + + sh = tuple(shape) + newarr = np.frombuffer(buff, dtype=dtype) + if len(newarr.shape) != len(sh): + newarr = newarr.reshape(sh, order=array_index_order_codes.npcode[ordering]) + # check if we need to byteswap required_byteorder = gifti_endian_codes.byteorder[endian] if (required_byteorder in ('big', 'little') and From 9cd82910db5b8affefaa8171027325e3dd5a18ff Mon Sep 17 00:00:00 2001 From: Egor Panfilov Date: Thu, 25 Apr 2019 12:41:27 +0300 Subject: [PATCH 084/689] Fixed typo in coordinate_systems doc page --- doc/source/coordinate_systems.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/coordinate_systems.rst b/doc/source/coordinate_systems.rst index ffb24a2e78..9f7aa1d8f4 100644 --- a/doc/source/coordinate_systems.rst +++ b/doc/source/coordinate_systems.rst @@ -23,8 +23,8 @@ their brain, a single EPI volume, and a structural scan. In general we never use the person's name in the image filenames, but we make an exception in this case: -* :download:`somones_epi.nii.gz `. -* :download:`somones_anatomy.nii.gz `. +* :download:`someones_epi.nii.gz `. +* :download:`someones_anatomy.nii.gz `. We can load up the EPI image to get the image data array: From 6baa31e8b9c89f2ba2931ce0b8f5150d43fd8ef8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 25 Apr 2019 07:48:10 -0400 Subject: [PATCH 085/689] MAINT: Update Zenodo, Changelog with errors/missing info --- .mailmap | 1 + .zenodo.json | 4 +++- Changelog | 6 +++--- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.mailmap b/.mailmap index d0f2bd6aff..54f6514527 100644 --- a/.mailmap +++ b/.mailmap @@ -42,6 +42,7 @@ Konstantinos Raktivan constracti Krish Subramaniam Krish Subramaniam Marc-Alexandre Côté Marc-Alexandre Cote Mathias Goncalves mathiasg +Matthew Cieslak Matt Cieslak Michael Hanke Michael Hanke Ly Nguyen lxn2 diff --git a/.zenodo.json b/.zenodo.json index e4ea56b108..562d42e00c 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -213,7 +213,9 @@ "orcid": "0000-0001-6313-0580" }, { - "name": "Cieslak, Matt" + "affiliation": "Department of Neuropsychiatry, University of Pennsylvania", + "name": "Cieslak, Matthew", + "orcid": "0000-0002-1931-4734" }, { "name": "Moreno, Miguel Estevan" diff --git a/Changelog b/Changelog index 6637d0b995..894256357b 100644 --- a/Changelog +++ b/Changelog @@ -21,7 +21,7 @@ Nibabel releases Most work on NiBabel so far has been by Matthew Brett (MB), Chris Markiewicz (CM), Michael Hanke (MH), Marc-Alexandre Côté (MC), Ben Cipollini (BC), Paul McCarthy (PM), Chris Cheng (CC), Yaroslav Halchenko (YOH), Satra Ghosh (SG), -Eric Larson (EL), Demien Wasserman, and Stephan Gerhard. +Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. @@ -31,7 +31,7 @@ References like "pr/298" refer to github pull request numbers. 
New features ------------ * Alternative ``Axis``-based interface for manipulating CIFTI-2 headers - (pr/641) (Michiel Cottaar, reviewed by Demien Wasserman, CM, SG) + (pr/641) (Michiel Cottaar, reviewed by Demian Wassermann, CM, SG) Enhancements ------------ @@ -232,7 +232,7 @@ Maintenance New features ------------ -* CIFTI support (pr/249) (SG, Michiel Cottaar, BC, CM, Demian Wasserman, MB) +* CIFTI support (pr/249) (SG, Michiel Cottaar, BC, CM, Demian Wassermann, MB) * Support for MRtrix TCK streamlines file format (pr/486) (MC, reviewed by MB, Arnaud Bore, J-Donald Tournier, Jean-Christophe Houde) * Added ``get_fdata()`` as default method to retrieve scaled floating point From 861e6d1148df4423135b69eac0e0661820d74eea Mon Sep 17 00:00:00 2001 From: Egor Panfilov Date: Thu, 25 Apr 2019 12:41:27 +0300 Subject: [PATCH 086/689] Fixed typo in coordinate_systems doc page --- doc/source/coordinate_systems.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/coordinate_systems.rst b/doc/source/coordinate_systems.rst index ffb24a2e78..9f7aa1d8f4 100644 --- a/doc/source/coordinate_systems.rst +++ b/doc/source/coordinate_systems.rst @@ -23,8 +23,8 @@ their brain, a single EPI volume, and a structural scan. In general we never use the person's name in the image filenames, but we make an exception in this case: -* :download:`somones_epi.nii.gz `. -* :download:`somones_anatomy.nii.gz `. +* :download:`someones_epi.nii.gz `. +* :download:`someones_anatomy.nii.gz `. We can load up the EPI image to get the image data array: From 2f7607506a41f80879b2561a548eb1154a70addc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 25 Apr 2019 10:29:23 -0400 Subject: [PATCH 087/689] FIX: Replace invalid MINC1 file with fixed file --- nibabel/tests/data/minc1-no-att.mnc | Bin 6872 -> 6872 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/nibabel/tests/data/minc1-no-att.mnc b/nibabel/tests/data/minc1-no-att.mnc index 1fcd595f7e560f74ba3cca8fb8ef5c4b5c27272b..b1ce938403419727ff92801c0b2b8a56b6cd7717 100644 GIT binary patch delta 20 ccmca%dc$pF From 6952c1fe8aad740a16d5d9d51f47afce7ec42aa8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 27 Apr 2019 21:50:11 -0400 Subject: [PATCH 088/689] TEST: Check for competing actions on warning filters --- nibabel/tests/test_volumeutils.py | 42 +++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 29c0edaf07..0c746c22d9 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -19,6 +19,8 @@ import itertools import gzip import bz2 +import threading +import time import numpy as np @@ -45,6 +47,7 @@ rec2dict, _dt_min_max, _write_data, + _ftype4scaled_finite, ) from ..openers import Opener, BZ2File from ..casting import (floor_log2, type_info, OK_FLOATS, shared_range) @@ -1245,3 +1248,42 @@ def read(self, n_bytes): 'Expected {0} bytes, got {1} bytes from {2}\n' ' - could the file be damaged?'.format( 11390625000000000000, 0, 'object')) + + +def test__ftype4scaled_finite_warningfilters(): + # This test checks our ability to properly manage the thread-unsafe + # warnings filter list. 
+ # 32MiB reliably produces the error on my machine; use 128 for safety + shape = (1024, 1024, 32) + tst_arr = np.zeros(shape, dtype=np.float32) + # Ensure that an overflow will happen + tst_arr[0, 0, 0] = np.finfo(np.float32).max + tst_arr[-1, -1, -1] = np.finfo(np.float32).min + go = threading.Event() + stop = threading.Event() + err = [] + class MakeTotalDestroy(threading.Thread): + def run(self): + # Restore the warnings filters when we're done testing + with warnings.catch_warnings(): + go.set() + while not stop.is_set(): + warnings.filters[:] = [] + time.sleep(0.01) + class CheckScaling(threading.Thread): + def run(self): + go.wait() + try: + # Use float16 to buy us two failures + _ftype4scaled_finite(tst_arr, 2.0, 1.0, default=np.float16) + except Exception as e: + err.append(e) + stop.set() + thread_a = CheckScaling() + thread_b = MakeTotalDestroy() + thread_a.start() + thread_b.start() + thread_a.join() + thread_b.join() + if err: + raise err[0] From 26bb1204d4b78f109a30fc72ef522b4e78387dc1 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 25 Apr 2019 09:07:49 -0400 Subject: [PATCH 089/689] FIX: Safer warning registry manipulation when checking for overflows --- nibabel/volumeutils.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 2b8349d369..dcc1eeedea 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -10,6 +10,7 @@ from __future__ import division, print_function import sys +import re import warnings import gzip from collections import OrderedDict @@ -41,6 +42,13 @@ #: file-like classes known to hold compressed data COMPRESSED_FILE_LIKES = (gzip.GzipFile, BZ2File) +_OVERFLOW_FILTER = ( + 'ignore', + re.compile(r'.*overflow.*', re.IGNORECASE | re.UNICODE), + RuntimeWarning, + re.compile(r'', re.UNICODE), + 0) + class Recoder(object): ''' class to return canonical code(s) from code or aliases @@ -1334,7 +1342,9 @@ def _ftype4scaled_finite(tst_arr, slope, inter, direction='read', tst_arr = np.atleast_1d(tst_arr) slope = np.atleast_1d(slope) inter = np.atleast_1d(inter) - warnings.filterwarnings('ignore', '.*overflow.*', RuntimeWarning) + warnings.filters.insert(0, _OVERFLOW_FILTER) + getattr(warnings, '_filters_mutated', lambda: None)() # PY2 + # warnings._filters_mutated() # PY3 try: for ftype in OK_FLOATS[def_ind:]: tst_trans = tst_arr.copy() @@ -1353,7 +1363,12 @@ def _ftype4scaled_finite(tst_arr, slope, inter, direction='read', if np.all(np.isfinite(tst_trans)): return ftype finally: - warnings.filters.pop(0) + try: + warnings.filters.remove(_OVERFLOW_FILTER) + getattr(warnings, '_filters_mutated', lambda: None)() # PY2 + # warnings._filters_mutated() # PY3 + except ValueError: + pass raise ValueError('Overflow using highest floating point type') From 3d3381e10b1aa947b69705209a90184f0e6d58b9 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 26 Apr 2019 10:28:12 -0400 Subject: [PATCH 090/689] RF: Use catch_warnings context managers to catch overflow --- nibabel/volumeutils.py | 48 +++++++++++++++++++----------------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index dcc1eeedea..bebb966fb5 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -10,7 +10,6 @@ from __future__ import division, print_function import sys -import re import warnings import gzip from collections import OrderedDict @@ -42,13 +41,6 @@ #: file-like classes known to hold compressed data COMPRESSED_FILE_LIKES = (gzip.GzipFile, BZ2File) -_OVERFLOW_FILTER = ( - 'ignore', - re.compile(r'.*overflow.*', re.IGNORECASE | re.UNICODE), - RuntimeWarning, - re.compile(r'', re.UNICODE), - 0) - class Recoder(object): ''' class to return canonical code(s) from code or aliases @@ -1342,32 +1334,36 @@ def _ftype4scaled_finite(tst_arr, slope, inter, direction='read', tst_arr = np.atleast_1d(tst_arr) slope = np.atleast_1d(slope) inter = np.atleast_1d(inter) - warnings.filters.insert(0, _OVERFLOW_FILTER) - getattr(warnings, '_filters_mutated', lambda: None)() # PY2 - # warnings._filters_mutated() # PY3 - try: - for ftype in OK_FLOATS[def_ind:]: - tst_trans = tst_arr.copy() - slope = slope.astype(ftype) - inter = inter.astype(ftype) + overflow_filter = ('error', '.*overflow.*', RuntimeWarning) + for ftype in OK_FLOATS[def_ind:]: + tst_trans = tst_arr.copy() + slope = slope.astype(ftype) + inter = inter.astype(ftype) + try: if direction == 'read': # as in reading of image from disk if slope != 1.0: - tst_trans = tst_trans * slope + # Keep warning contexts small to reduce the odds of a race + with warnings.catch_warnings(): + # Error on overflows to short circuit the logic + warnings.filterwarnings(*overflow_filter) + tst_trans = tst_trans * slope if inter != 0.0: - tst_trans = tst_trans + inter + with warnings.catch_warnings(): + warnings.filterwarnings(*overflow_filter) + tst_trans = tst_trans + inter elif direction == 'write': if inter != 0.0: - tst_trans = tst_trans - inter + with warnings.catch_warnings(): + warnings.filterwarnings(*overflow_filter) + tst_trans = tst_trans - inter if slope != 1.0: - tst_trans = tst_trans / slope + with warnings.catch_warnings(): + warnings.filterwarnings(*overflow_filter) + tst_trans = tst_trans / slope + # Double-check that result is finite if np.all(np.isfinite(tst_trans)): return ftype - finally: - try: - warnings.filters.remove(_OVERFLOW_FILTER) - getattr(warnings, '_filters_mutated', lambda: None)() # PY2 - # warnings._filters_mutated() # PY3 - except ValueError: + except RuntimeWarning: pass raise ValueError('Overflow using highest floating point type') From 143ab5d54509f292c183d57bfd95fb824cf3ebe6 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 27 Apr 2019 22:11:33 -0400 Subject: [PATCH 091/689] TEST: Clarify comments --- nibabel/tests/test_volumeutils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 0c746c22d9..e4c9521f5c 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -1256,7 +1256,7 @@ def test__ftype4scaled_finite_warningfilters(): # 32MiB reliably produces the error on my machine; use 128 for safety shape = (1024, 1024, 32) tst_arr = np.zeros(shape, dtype=np.float32) - # Ensure that an overflow will happen + # Ensure that an overflow will happen for < float64 tst_arr[0, 0, 0] = np.finfo(np.float32).max tst_arr[-1, -1, -1] = np.finfo(np.float32).min go = threading.Event() @@ -1274,7 +1274,7 @@ class CheckScaling(threading.Thread): def run(self): go.wait() try: - # Use float16 to buy us two failures + # Use float16 to ensure two failures and increase time in function _ftype4scaled_finite(tst_arr, 2.0, 1.0, default=np.float16) except Exception as e: err.append(e) From 9ce49182d4bd5e47ae874a598bfa24bf0d58453d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 17 May 2019 22:48:34 -0400 Subject: [PATCH 092/689] MAINT: Pacify upstream Deprecation/FutureWarnings --- nibabel/minc2.py | 2 +- nibabel/tests/test_orientations.py | 2 +- nibabel/viewers.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index f4ee3eab6b..7f4069d823 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -108,7 +108,7 @@ def _get_valid_range(self): def _get_scalar(self, var): """ Get scalar value from HDF5 scalar """ - return var.value + return var[()] def _get_array(self, var): """ Get array from HDF5 array """ diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 0605d33f20..798f595fc7 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -117,7 +117,7 @@ def same_transform(taff, ornt, shape): o2t_pts = np.dot(itaff[:3, :3], arr_pts) + itaff[:3, 3][:, None] assert np.allclose(np.round(o2t_pts), o2t_pts) # fancy index out the t_arr values - vals = t_arr[list(o2t_pts.astype('i'))] + vals = t_arr[tuple(o2t_pts.astype('i'))] return np.all(vals == arr.ravel()) diff --git a/nibabel/viewers.py b/nibabel/viewers.py index 9c15625348..7a0f4d93d7 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -414,7 +414,7 @@ def _set_position(self, x, y, z, notify=True): idx = [slice(None)] * len(self._axes) for ii in range(3): idx[self._order[ii]] = self._data_idx[ii] - vdata = self._data[idx].ravel() + vdata = self._data[tuple(idx)].ravel() vdata = np.concatenate((vdata, [vdata[-1]])) self._volume_ax_objs['patch'].set_x(self._data_idx[3] - 0.5) self._volume_ax_objs['step'].set_ydata(vdata) From edfcdf05710fd6de6c89c0fa06ffd7f485fbf11e Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 18 May 2019 09:30:07 -0400 Subject: [PATCH 093/689] RF: De-duplicate - the vulnerable window stays small --- nibabel/volumeutils.py | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index bebb966fb5..95c7af3e45 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -1340,25 +1340,18 @@ def _ftype4scaled_finite(tst_arr, slope, inter, direction='read', slope = slope.astype(ftype) inter = inter.astype(ftype) try: - if direction == 'read': # as in reading of image from disk - if slope != 1.0: - # Keep warning contexts small to reduce the odds of a race - with warnings.catch_warnings(): - # Error on overflows to short circuit the logic - warnings.filterwarnings(*overflow_filter) + with warnings.catch_warnings(): + # Error on overflows to short circuit the logic + warnings.filterwarnings(*overflow_filter) + if direction == 'read': # as in reading of image from disk + if slope != 1.0: tst_trans = tst_trans * slope - if inter != 0.0: - with warnings.catch_warnings(): - warnings.filterwarnings(*overflow_filter) + if inter != 0.0: tst_trans = tst_trans + inter - elif direction == 'write': - if inter != 0.0: - with warnings.catch_warnings(): - warnings.filterwarnings(*overflow_filter) + elif direction == 'write': + if inter != 0.0: tst_trans = tst_trans - inter - if slope != 1.0: - with warnings.catch_warnings(): - warnings.filterwarnings(*overflow_filter) + if slope != 1.0: tst_trans = tst_trans / slope # Double-check that result is finite if np.all(np.isfinite(tst_trans)): From fb082fa20f502ad9808bb5feb302154e3ee6d0de Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 18 May 2019 13:31:46 -0400 Subject: [PATCH 094/689] TEST: Use realistic data in test, loop to ensure interference --- nibabel/tests/test_volumeutils.py | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index e4c9521f5c..06df9eb6ad 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -1253,12 +1253,12 @@ def read(self, n_bytes): def test__ftype4scaled_finite_warningfilters(): # This test checks our ability to properly manage the thread-unsafe # warnings filter list. - # 32MiB reliably produces the error on my machine; use 128 for safety - shape = (1024, 1024, 32) - tst_arr = np.zeros(shape, dtype=np.float32) + + # _ftype4scaled_finite always operates on one-or-two element arrays # Ensure that an overflow will happen for < float64 - tst_arr[0, 0, 0] = np.finfo(np.float32).max - tst_arr[-1, -1, -1] = np.finfo(np.float32).min + finfo = np.finfo(np.float32) + tst_arr = np.array((finfo.min, finfo.max), dtype=np.float32) + go = threading.Event() stop = threading.Event() err = [] @@ -1269,21 +1269,29 @@ def run(self): go.set() while not stop.is_set(): warnings.filters[:] = [] - time.sleep(0.01) + time.sleep(0) class CheckScaling(threading.Thread): def run(self): go.wait() - try: - # Use float16 to ensure two failures and increase time in function - _ftype4scaled_finite(tst_arr, 2.0, 1.0, default=np.float16) - except Exception as e: - err.append(e) + # Give ourselves a few bites at the apple + # 200 loops through the function takes ~10ms + # The highest number of iterations I've seen before hitting interference + # is 131, with 99% under 30, so this should be reasonably reliable. 
+ for i in range(200): + try: + # Use float16 to ensure two failures and increase time in function + _ftype4scaled_finite(tst_arr, 2.0, 1.0, default=np.float16) + except Exception as e: + err.append(e) + break stop.set() + thread_a = CheckScaling() thread_b = MakeTotalDestroy() thread_a.start() thread_b.start() thread_a.join() thread_b.join() + if err: raise err[0] From fc5d67701962c2f9e8ada2ebe25ca2f4083a1138 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 18 May 2019 14:31:49 -0400 Subject: [PATCH 095/689] MAINT: Deprecate keep_file_open == "auto" (scheduled for 2.4.0) --- nibabel/analyze.py | 34 ++++++++--------- nibabel/arrayproxy.py | 65 ++++++++++++++++----------------- nibabel/brikhead.py | 47 ++++++++++++------------ nibabel/freesurfer/mghformat.py | 34 ++++++++--------- nibabel/spm99analyze.py | 18 ++++----- 5 files changed, 98 insertions(+), 100 deletions(-) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index ae7c8f69e6..90907544c2 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -935,7 +935,11 @@ def set_data_dtype(self, dtype): @classmethod @kw_only_meth(1) def from_file_map(klass, file_map, mmap=True, keep_file_open=None): - '''class method to create image from mapping in `file_map `` + ''' Class method to create image from mapping in ``file_map`` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- @@ -950,18 +954,14 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. If ``file_map`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + If ``file_map`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. Returns @@ -991,7 +991,11 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): @classmethod @kw_only_meth(1) def from_filename(klass, filename, mmap=True, keep_file_open=None): - '''class method to create image from filename `filename` + '''Class method to create image from filename `filename` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- @@ -1004,17 +1008,13 @@ def from_filename(klass, filename, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. 
- keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. The default value (``None``) will - result in the value of + a new file handle is created every time the image is accessed. + The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. Returns diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index c74386b0ac..79e5d87c10 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -27,6 +27,7 @@ """ from contextlib import contextmanager from threading import RLock +import warnings import numpy as np @@ -40,23 +41,22 @@ """This flag controls whether a new file handle is created every time an image is accessed through an ``ArrayProxy``, or a single file handle is created and used for the lifetime of the ``ArrayProxy``. It should be set to one of -``True``, ``False``, or ``'auto'``. +``True`` or ``False``. Management of file handles will be performed either by ``ArrayProxy`` objects, or by the ``indexed_gzip`` package if it is used. If this flag is set to ``True``, a single file handle is created and used. If -``False``, a new file handle is created every time the image is accessed. For -gzip files, if ``'auto'``, and the optional ``indexed_gzip`` dependency is -present, a single file handle is created and persisted. If ``indexed_gzip`` is -not available, behaviour is the same as if ``keep_file_open is False``. +``False``, a new file handle is created every time the image is accessed. +If this flag is set to ``'auto'``, a ``DeprecationWarning`` will be raised, which +will become a ``ValueError`` in nibabel 3.0.0. If this is set to any other value, attempts to create an ``ArrayProxy`` without specifying the ``keep_file_open`` flag will result in a ``ValueError`` being raised. -.. warning:: Setting this flag to a value of ``'auto'`` will become deprecated - behaviour in version 2.4.0. Support for ``'auto'`` will be removed +.. warning:: Setting this flag to a value of ``'auto'`` became deprecated + behaviour in version 2.4.1. Support for ``'auto'`` will be removed in version 3.0.0. """ KEEP_FILE_OPEN_DEFAULT = False @@ -100,6 +100,10 @@ class ArrayProxy(object): def __init__(self, file_like, spec, mmap=True, keep_file_open=None): """Initialize array proxy instance + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + Parameters ---------- file_like : object @@ -127,18 +131,15 @@ def __init__(self, file_like, spec, mmap=True, keep_file_open=None): True gives the same behavior as ``mmap='c'``. If `file_like` cannot be memory-mapped, ignore `mmap` value and read array from file. 
- keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. If ``file_like`` is an open file - handle, this setting has no effect. The default value (``None``) - will result in the value of ``KEEP_FILE_OPEN_DEFAULT`` being used. + a new file handle is created every time the image is accessed. + If ``file_like`` is an open file handle, this setting has no + effect. The default value (``None``) will result in the value of + ``KEEP_FILE_OPEN_DEFAULT`` being used. """ if mmap not in (True, False, 'c', 'r'): raise ValueError("mmap should be one of {True, False, 'c', 'r'}") @@ -236,17 +237,9 @@ def _should_keep_file_open(self, file_like, keep_file_open): In this case, file handle management is delegated to the ``indexed_gzip`` library. - 5. If ``keep_file_open`` is ``'auto'``, ``file_like`` is a path to a - ``.gz`` file, and ``indexed_gzip`` is present, both internal flags - are set to ``True``. - - 6. If ``keep_file_open`` is ``'auto'``, and ``file_like`` is not a - path to a ``.gz`` file, or ``indexed_gzip`` is not present, both - internal flags are set to ``False``. - - Note that a value of ``'auto'`` for ``keep_file_open`` will become - deprecated behaviour in version 2.4.0, and support for ``'auto'`` will - be removed in version 3.0.0. + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will be removed in nibabel 3.0. Parameters ---------- @@ -266,20 +259,26 @@ def _should_keep_file_open(self, file_like, keep_file_open): """ if keep_file_open is None: keep_file_open = KEEP_FILE_OPEN_DEFAULT - if keep_file_open not in ('auto', True, False): - raise ValueError('keep_file_open should be one of {None, ' - '\'auto\', True, False}') + if keep_file_open == 'auto': + warnings.warn("Setting nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT to 'auto' is " + "deprecated and will become an error in v3.0.", DeprecationWarning) + if keep_file_open == 'auto': + warnings.warn("A value of 'auto' for keep_file_open is deprecated and will become an " + "error in v3.0. 
You probably want False.", DeprecationWarning) + elif keep_file_open not in (True, False): + raise ValueError('keep_file_open should be one of {None, True, False}') + # file_like is a handle - keep_file_open is irrelevant if hasattr(file_like, 'read') and hasattr(file_like, 'seek'): return False, False # if the file is a gzip file, and we have_indexed_gzip, have_igzip = openers.HAVE_INDEXED_GZIP and file_like.endswith('.gz') + # XXX Remove in v3.0 if keep_file_open == 'auto': return have_igzip, have_igzip - elif keep_file_open: - return True, True - else: - return False, have_igzip + + persist_opener = keep_file_open or have_igzip + return keep_file_open, persist_opener @property @deprecate_with_version('ArrayProxy.header deprecated', '2.2', '3.0') diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 9e521e61b6..1925f6afed 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -227,6 +227,10 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None): """ Initialize AFNI array proxy + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + Parameters ---------- file_like : file-like object @@ -240,18 +244,14 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None): True gives the same behavior as ``mmap='c'``. If `file_like` cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behavior is the same as if - ``keep_file_open is False``. If ``file_like`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + If ``file_like`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. """ super(AFNIArrayProxy, self).__init__(file_like, @@ -506,6 +506,10 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): """ Creates an AFNIImage instance from `file_map` + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + Parameters ---------- file_map : dict @@ -518,18 +522,14 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : {None, 'auto', True, False}, optional, keyword only + keep_file_open : {None, True, False}, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. 
If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behavior is the same as if - ``keep_file_open is False``. If ``file_like`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + If ``file_like`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. """ with file_map['header'].get_prepare_fileobj('rt') as hdr_fobj: @@ -547,6 +547,10 @@ def from_filename(klass, filename, mmap=True, keep_file_open=None): """ Creates an AFNIImage instance from `filename` + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + Parameters ---------- filename : str @@ -558,18 +562,13 @@ def from_filename(klass, filename, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : {None, 'auto', True, False}, optional, keyword only + keep_file_open : {None, True, False}, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behavior is the same as if - ``keep_file_open is False``. If ``file_like`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. """ file_map = klass.filespec_to_file_map(filename) diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index bf92bd962c..e71bfb5ef4 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -536,7 +536,11 @@ def filespec_to_file_map(klass, filespec): @classmethod @kw_only_meth(1) def from_file_map(klass, file_map, mmap=True, keep_file_open=None): - '''Load image from `file_map` + '''Load image from ``file_map`` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- @@ -550,18 +554,14 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. 
If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. If ``file_map`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + If ``file_map`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. ''' if mmap not in (True, False, 'c', 'r'): @@ -580,7 +580,11 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): @classmethod @kw_only_meth(1) def from_filename(klass, filename, mmap=True, keep_file_open=None): - '''class method to create image from filename `filename` + ''' Class method to create image from filename ``filename`` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- @@ -593,17 +597,13 @@ def from_filename(klass, filename, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. - keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. The default value (``None``) will - result in the value of + a new file handle is created every time the image is accessed. + The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. Returns diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 40ae4f44b8..d420bb0c2e 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -246,7 +246,11 @@ class Spm99AnalyzeImage(analyze.AnalyzeImage): @classmethod @kw_only_meth(1) def from_file_map(klass, file_map, mmap=True, keep_file_open=None): - '''class method to create image from mapping in `file_map `` + ''' Class method to create image from mapping in ``file_map`` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- @@ -261,18 +265,14 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): `mmap` value of True gives the same behavior as ``mmap='c'``. If image data file cannot be memory-mapped, ignore `mmap` value and read array from file. 
- keep_file_open : { None, 'auto', True, False }, optional, keyword only + keep_file_open : { None, True, False }, optional, keyword only `keep_file_open` controls whether a new file handle is created every time the image is accessed, or a single file handle is created and used for the lifetime of this ``ArrayProxy``. If ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. If - ``'auto'``, and the optional ``indexed_gzip`` dependency is - present, a single file handle is created and persisted. If - ``indexed_gzip`` is not available, behaviour is the same as if - ``keep_file_open is False``. If ``file_map`` refers to an open - file handle, this setting has no effect. The default value - (``None``) will result in the value of + a new file handle is created every time the image is accessed. + If ``file_map`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. Returns From ae053f5cc34c7ac403b21648e2b7fe9f3c61e023 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 17 May 2019 21:56:07 -0400 Subject: [PATCH 096/689] TEST: Test mmap parameter acceptance --- nibabel/tests/test_image_api.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ba51878715..ad8ff1c7f6 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -416,6 +416,34 @@ def validate_shape_deprecated(self, imaker, params): assert_equal(img.get_shape(), params['shape']) assert_equal(len(w), 1) + def validate_mmap_parameter(self, imaker, params): + img = imaker() + fname = img.get_filename() + with InTemporaryDirectory(): + # Load test files with mmap parameters + # or + # Save a generated file so we can test it + if fname is None: + # Skip only formats we can't write + if not img.rw or not img.valid_exts: + return + fname = 'image' + img.valid_exts[0] + img.to_filename(fname) + rt_img = img.__class__.from_filename(fname, mmap=True) + assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) + rt_img = img.__class__.from_filename(fname, mmap=False) + assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) + rt_img = img.__class__.from_filename(fname, mmap='c') + assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) + rt_img = img.__class__.from_filename(fname, mmap='r') + assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) + # r+ is specifically not valid for images + assert_raises(ValueError, + img.__class__.from_filename, fname, mmap='r+') + assert_raises(ValueError, + img.__class__.from_filename, fname, mmap='invalid') + del rt_img # to allow windows to delete the directory + class HeaderShapeMixin(object): """ Tests that header shape can be set and got From e65d0d44ef90fa7db9f105ad116248f8e1e258e0 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 17 May 2019 22:14:32 -0400 Subject: [PATCH 097/689] RF: Move mmap/keep_file_open parameters to DataobjImage.from_file* --- nibabel/analyze.py | 41 -------------- nibabel/brikhead.py | 36 ------------- nibabel/dataobj_images.py | 80 ++++++++++++++++++++++++++++ nibabel/freesurfer/mghformat.py | 54 ++++--------------- nibabel/tests/test_dataobj_images.py | 13 ++++- 5 files changed, 100 insertions(+), 124 deletions(-) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 90907544c2..8015715590 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -988,47 +988,6 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): 'file_map': copy_file_map(file_map)} return img - @classmethod - @kw_only_meth(1) - def from_filename(klass, filename, mmap=True, keep_file_open=None): - '''Class method to create image from filename `filename` - - .. deprecated:: 2.4.1 - ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. - - Parameters - ---------- - filename : str - Filename of image to load - mmap : {True, False, 'c', 'r'}, optional, keyword only - `mmap` controls the use of numpy memory mapping for reading image - array data. If False, do not try numpy ``memmap`` for data array. - If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A - `mmap` value of True gives the same behavior as ``mmap='c'``. If - image data file cannot be memory-mapped, ignore `mmap` value and - read array from file. - keep_file_open : { None, True, False }, optional, keyword only - `keep_file_open` controls whether a new file handle is created - every time the image is accessed, or a single file handle is - created and used for the lifetime of this ``ArrayProxy``. If - ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. - The default value (``None``) will result in the value of - ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. - - Returns - ------- - img : Analyze Image instance - ''' - if mmap not in (True, False, 'c', 'r'): - raise ValueError("mmap should be one of {True, False, 'c', 'r'}") - file_map = klass.filespec_to_file_map(filename) - return klass.from_file_map(file_map, mmap=mmap, - keep_file_open=keep_file_open) - - load = from_filename - @staticmethod def _get_fileholders(file_map): """ Return fileholder for header and image diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 1925f6afed..deba85bb58 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -541,40 +541,6 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): return klass(data, hdr.get_affine(), header=hdr, extra=None, file_map=file_map) - @classmethod - @kw_only_meth(1) - def from_filename(klass, filename, mmap=True, keep_file_open=None): - """ - Creates an AFNIImage instance from `filename` - - .. deprecated:: 2.4.1 - ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. - - Parameters - ---------- - filename : str - Path to BRIK or HEAD file to be loaded - mmap : {True, False, 'c', 'r'}, optional, keyword only - `mmap` controls the use of numpy memory mapping for reading image - array data. If False, do not try numpy ``memmap`` for data array. - If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A - `mmap` value of True gives the same behavior as ``mmap='c'``. 
If - image data file cannot be memory-mapped, ignore `mmap` value and - read array from file. - keep_file_open : {None, True, False}, optional, keyword only - `keep_file_open` controls whether a new file handle is created - every time the image is accessed, or a single file handle is - created and used for the lifetime of this ``ArrayProxy``. If - ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. - The default value (``None``) will result in the value of - ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. - """ - file_map = klass.filespec_to_file_map(filename) - return klass.from_file_map(file_map, mmap=mmap, - keep_file_open=keep_file_open) - @classmethod def filespec_to_file_map(klass, filespec): """ @@ -620,7 +586,5 @@ def filespec_to_file_map(klass, filespec): file_map[key].filename = fname return file_map - load = from_filename - load = AFNIImage.load diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 86185a7aef..d7d082403d 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -11,6 +11,7 @@ import numpy as np from .filebasedimages import FileBasedImage +from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version @@ -404,3 +405,82 @@ def get_shape(self): """ Return shape for image """ return self.shape + + @classmethod + @kw_only_meth(1) + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + ''' Class method to create image from mapping in ``file_map`` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + + Parameters + ---------- + file_map : dict + Mapping with (key, value) pairs of (``file_type``, FileHolder + instance giving file-likes for each file needed for this image + type. + mmap : {True, False, 'c', 'r'}, optional, keyword only + `mmap` controls the use of numpy memory mapping for reading image + array data. If False, do not try numpy ``memmap`` for data array. + If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A + `mmap` value of True gives the same behavior as ``mmap='c'``. If + image data file cannot be memory-mapped, ignore `mmap` value and + read array from file. + keep_file_open : { None, True, False }, optional, keyword only + `keep_file_open` controls whether a new file handle is created + every time the image is accessed, or a single file handle is + created and used for the lifetime of this ``ArrayProxy``. If + ``True``, a single file handle is created and used. If ``False``, + a new file handle is created every time the image is accessed. + If ``file_map`` refers to an open file handle, this setting has no + effect. The default value (``None``) will result in the value of + ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. + + Returns + ------- + img : DataobjImage instance + ''' + raise NotImplementedError + + @classmethod + @kw_only_meth(1) + def from_filename(klass, filename, mmap=True, keep_file_open=None): + '''Class method to create image from filename `filename` + + .. deprecated:: 2.4.1 + ``keep_file_open='auto'`` is redundant with `False` and has + been deprecated. It will raise an error in nibabel 3.0. + + Parameters + ---------- + filename : str + Filename of image to load + mmap : {True, False, 'c', 'r'}, optional, keyword only + `mmap` controls the use of numpy memory mapping for reading image + array data. If False, do not try numpy ``memmap`` for data array.
+ If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A + `mmap` value of True gives the same behavior as ``mmap='c'``. If + image data file cannot be memory-mapped, ignore `mmap` value and + read array from file. + keep_file_open : { None, True, False }, optional, keyword only + `keep_file_open` controls whether a new file handle is created + every time the image is accessed, or a single file handle is + created and used for the lifetime of this ``ArrayProxy``. If + ``True``, a single file handle is created and used. If ``False``, + a new file handle is created every time the image is accessed. + The default value (``None``) will result in the value of + ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. + + Returns + ------- + img : DataobjImage instance + ''' + if mmap not in (True, False, 'c', 'r'): + raise ValueError("mmap should be one of {True, False, 'c', 'r'}") + file_map = klass.filespec_to_file_map(filename) + return klass.from_file_map(file_map, mmap=mmap, + keep_file_open=keep_file_open) + + load = from_filename diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index e71bfb5ef4..8ff783a865 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -536,7 +536,7 @@ def filespec_to_file_map(klass, filespec): @classmethod @kw_only_meth(1) def from_file_map(klass, file_map, mmap=True, keep_file_open=None): - '''Load image from ``file_map`` + ''' Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has been deprecated. It will raise an error in nibabel 3.0. Parameters ---------- - file_map : None or mapping, optional - files mapping. If None (default) use object's ``file_map`` - attribute instead + file_map : dict + Mapping with (key, value) pairs of (``file_type``, FileHolder + instance giving file-likes for each file needed for this image + type. mmap : {True, False, 'c', 'r'}, optional, keyword only `mmap` controls the use of numpy memory mapping for reading image array data. If False, do not try numpy ``memmap`` for data array. @@ -563,6 +564,10 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): If ``file_map`` refers to an open file handle, this setting has no effect. The default value (``None``) will result in the value of ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. + + Returns + ------- + img : MGHImage instance ''' if mmap not in (True, False, 'c', 'r'): raise ValueError("mmap should be one of {True, False, 'c', 'r'}") @@ -577,47 +582,6 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): img = klass(data, affine, header, file_map=file_map) return img - @classmethod - @kw_only_meth(1) - def from_filename(klass, filename, mmap=True, keep_file_open=None): - ''' Class method to create image from filename ``filename`` - - .. deprecated:: 2.4.1 - ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. - - Parameters - ---------- - filename : str - Filename of image to load - mmap : {True, False, 'c', 'r'}, optional, keyword only - `mmap` controls the use of numpy memory mapping for reading image - array data. If False, do not try numpy ``memmap`` for data array. - If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A - `mmap` value of True gives the same behavior as ``mmap='c'``. If - image data file cannot be memory-mapped, ignore `mmap` value and - read array from file.
- keep_file_open : { None, True, False }, optional, keyword only - `keep_file_open` controls whether a new file handle is created - every time the image is accessed, or a single file handle is - created and used for the lifetime of this ``ArrayProxy``. If - ``True``, a single file handle is created and used. If ``False``, - a new file handle is created every time the image is accessed. - The default value (``None``) will result in the value of - ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. - - Returns - ------- - img : MGHImage instance - ''' - if mmap not in (True, False, 'c', 'r'): - raise ValueError("mmap should be one of {True, False, 'c', 'r'}") - file_map = klass.filespec_to_file_map(filename) - return klass.from_file_map(file_map, mmap=mmap, - keep_file_open=keep_file_open) - - load = from_filename - def to_file_map(self, file_map=None): ''' Write image to `file_map` or contained ``self.file_map`` diff --git a/nibabel/tests/test_dataobj_images.py b/nibabel/tests/test_dataobj_images.py index 4c40ff9f17..e0f042939a 100644 --- a/nibabel/tests/test_dataobj_images.py +++ b/nibabel/tests/test_dataobj_images.py @@ -16,9 +16,18 @@ class DoNumpyImage(DataobjImage): files_types = (('image', '.npy'),) @classmethod - def from_file_map(klass, file_map): + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + if mmap not in (True, False, 'c', 'r'): + raise ValueError("mmap should be one of {True, False, 'c', 'r'}") + if mmap is True: + mmap = 'c' + elif mmap is False: + mmap = None with file_map['image'].get_prepare_fileobj('rb') as fobj: - arr = np.load(fobj) + try: + arr = np.load(fobj, mmap_mode=mmap) + except Exception: + arr = np.load(fobj) return klass(arr) def to_file_map(self, file_map=None): From e72fad518576e5766fd455faf6e247130dbe4743 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 17 May 2019 22:15:43 -0400 Subject: [PATCH 098/689] FIX: Pass kwargs from Cifti2.from_file* to Nifti2.from_file* --- nibabel/cifti2/cifti2.py | 41 ++++++++-------------------------------- 1 file changed, 8 insertions(+), 33 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 8a4f12e767..0f8108b235 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -29,6 +29,7 @@ from ..dataobj_images import DataobjImage from ..nifti2 import Nifti2Image, Nifti2Header from ..arrayproxy import reshape_dataobj +from ..keywordonly import kw_only_meth def _float_01(val): @@ -1355,7 +1356,8 @@ def nifti_header(self): return self._nifti_header @classmethod - def from_file_map(klass, file_map): + @kw_only_meth(1) + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): """ Load a CIFTI-2 image from a file_map Parameters @@ -1368,7 +1370,8 @@ def from_file_map(klass, file_map): Returns a Cifti2Image """ from .parse_cifti2 import _Cifti2AsNiftiImage, Cifti2Extension - nifti_img = _Cifti2AsNiftiImage.from_file_map(file_map) + nifti_img = _Cifti2AsNiftiImage.from_file_map(file_map, mmap=mmap, + keep_file_open=keep_file_open) # Get cifti2 header for item in nifti_img.header.extensions: @@ -1380,7 +1383,7 @@ def from_file_map(klass, file_map): 'extension') # Construct cifti image.
- # User array proxy object where possible + # Use array proxy object where possible dataobj = nifti_img.dataobj return Cifti2Image(reshape_dataobj(dataobj, dataobj.shape[4:]), header=cifti_header, @@ -1455,33 +1458,5 @@ def set_data_dtype(self, dtype): self._nifti_header.set_data_dtype(dtype) -def load(filename): - """ Load cifti2 from `filename` - - Parameters - ---------- - filename : str - filename of image to be loaded - - Returns - ------- - img : Cifti2Image - cifti image instance - - Raises - ------ - ImageFileError: if `filename` doesn't look like cifti - IOError : if `filename` does not exist - """ - return Cifti2Image.from_filename(filename) - - -def save(img, filename): - """ Save cifti to `filename` - - Parameters - ---------- - filename : str - filename to which to save image - """ - Cifti2Image.instance_to_filename(img, filename) +load = Cifti2Image.from_filename +save = Cifti2Image.instance_to_filename From c1c427b4d613c28f0a1c9e0068f62f8ad6d10df0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 17 May 2019 22:16:37 -0400 Subject: [PATCH 099/689] FIX: Accept mmap keep_file_open for Ecat and Minc, with no effect --- nibabel/ecat.py | 4 +++- nibabel/minc1.py | 5 ++++- nibabel/minc2.py | 4 +++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index 8713fc4ea2..e8b881bd66 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -54,6 +54,7 @@ from .arraywriters import make_array_writer from .wrapstruct import WrapStruct from .fileslice import canonical_slicers, predict_shape, slice2outax +from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version BLOCK_SIZE = 512 @@ -873,7 +874,8 @@ def _get_fileholders(file_map): return file_map['header'], file_map['image'] @classmethod - def from_file_map(klass, file_map): + @kw_only_meth(1) + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): """class method to create image from mapping specified in file_map """ diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 57042f32f0..369922bf99 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -17,6 +17,7 @@ from .spatialimages import SpatialHeader, SpatialImage from .fileslice import canonical_slicers +from .keywordonly import kw_only_meth from .deprecated import FutureWarningMixin _dt_dict = { @@ -310,7 +311,9 @@ class Minc1Image(SpatialImage): ImageArrayProxy = MincImageArrayProxy @classmethod - def from_file_map(klass, file_map): + @kw_only_meth(1) + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + # Note that mmap and keep_file_open are included for proper signature compatibility; they have no effect here with file_map['image'].get_prepare_fileobj() as fobj: minc_file = Minc1File(netcdf_file(fobj)) affine = minc_file.get_affine() diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 7f4069d823..40f38f97b3 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -27,6 +27,7 @@ """ import numpy as np +from .keywordonly import kw_only_meth from .optpkg import optional_package h5py, have_h5py, setup_module = optional_package('h5py') @@ -158,7 +159,8 @@ class Minc2Image(Minc1Image): header_class = Minc2Header @classmethod - def from_file_map(klass, file_map): + @kw_only_meth(1) + def from_file_map(klass, file_map, mmap=True, keep_file_open=None): holder = file_map['image'] if holder.filename is None: raise MincError('MINC2 needs filename for load') From c639d9ab1abb717676197a6e69abb8b90589e9b4 Mon Sep 17 00:00:00 2001 From: "Christopher J.
Markiewicz" Date: Thu, 23 May 2019 14:54:54 -0400 Subject: [PATCH 100/689] DOC: Update changelog, author list --- Changelog | 32 ++++++++++++++++++++++++++++++++ doc/source/index.rst | 1 + 2 files changed, 33 insertions(+) diff --git a/Changelog b/Changelog index 894256357b..a6dc71a7e7 100644 --- a/Changelog +++ b/Changelog @@ -25,6 +25,38 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. +2.4.1 (Monday 27 May 2019) +============================ + +Contributions from Egor Pafilov and Dave Allured. + +Enhancements +------------ +* Enable ``mmap``, ``keep_file_open`` options when loading any + ``DataobjImage`` (pr/759) (CM, reviewed by PM) + +Bug fixes +--------- +* Ensure loaded GIFTI files expose writable data arrays (pr/750) (CM, + reviewed by PM) +* Safer warning registry manipulation when checking for overflows (pr/753) + (CM, reviewed by MB) + +Maintenance +----------- +* Fix typo in coordinate systems doc (pr/751) (Egor Panfilov, reviewed by + CM) +* Replace invalid MINC1 test file with fixed file (pr/754) (Dave Allured + with CM) +* Update Sphinx config to support recent Sphinx/numpydoc (p4/749) (CM, + reviewed by PM) +* Pacify ``FutureWarning`` and ``DeprecationWarning`` from h5py, numpy + (pr/760) (CM) + +API changes and deprecations +---------------------------- +* Deprecate ``keep_file_open == 'auto'`` (pr/761) (CM, reviewed by PM) + 2.4.0 (Monday 1 April 2019) ============================ diff --git a/doc/source/index.rst b/doc/source/index.rst index 453224bffb..4c6201a780 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -96,6 +96,7 @@ contributed code and discussion (in rough order of appearance): * Samir Reddigari * Konstantinos Raktivan * Matt Cieslak +* Egor Pafilov License reprise =============== From 63b7a522034787a60bc4b4590cdd6fe659f0e329 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 23 May 2019 15:12:14 -0400 Subject: [PATCH 101/689] TEST: Verify we can write files with repeated labels --- nibabel/freesurfer/tests/test_io.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 1b6065f351..4f19f9e960 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -356,3 +356,17 @@ def test_label(): labels, scalars = read_label(label_path, True) assert_true(np.all(labels == label)) assert_true(len(labels) == len(scalars)) + + +def test_write_annot_maxstruct(): + """Verify we can write files with repeated labels - test by reading""" + with InTemporaryDirectory(): + nlabels = 3 + names = ['label {}'.format(l) for l in range(1, nlabels + 1)] + # max label < n_labels + labels = np.array([1, 1, 1], dtype=np.int32) + rgba = np.array(np.random.randint(0, 255, (nlabels, 4)), dtype=np.int32) + annot_path = 'c.annot' + + write_annot(annot_path, labels, rgba, names) + read_annot(annot_path) From 040a2f491f384ae610666a41dd3bfd7b6b807e23 Mon Sep 17 00:00:00 2001 From: Jath Palasubramaniam Date: Sat, 25 May 2019 00:43:02 +1000 Subject: [PATCH 102/689] Corrects import of MutableMapping class for python 3.3+ MutableMapping class is now imported from collections.abc instead of collections. This change was made to python standard library in version 3.3. collections.MutableMapping has been provided for backwards compatability up to python 3.7 but will no longer be available from python 3.8. 
A try/except statement is used with the import to retain backwards compatibility with python versions < 3.3, including python 2. --- nibabel/streamlines/tractogram.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 67afb4b211..209ed27c26 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -1,9 +1,14 @@ import copy import numbers import numpy as np -import collections from warnings import warn +try: + from collections.abc import MutableMapping +except ImportError: + # PY2 compatibility + from collections import MutableMapping + from nibabel.affines import apply_affine from .array_sequence import ArraySequence @@ -19,7 +24,7 @@ def is_lazy_dict(obj): return is_data_dict(obj) and callable(list(obj.store.values())[0]) -class SliceableDataDict(collections.MutableMapping): +class SliceableDataDict(MutableMapping): """ Dictionary for which key access can do slicing on the values. This container behaves like a standard dictionary but extends key access to @@ -181,7 +186,7 @@ def _extend_entry(self, key, value): self[key].extend(value) -class LazyDict(collections.MutableMapping): +class LazyDict(MutableMapping): """ Dictionary of generator functions. This container behaves like a dictionary but it makes sure its elements are From e018adfbb1f1857a94cd01a9ab13c4530f037e6e Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 24 May 2019 17:44:04 -0400 Subject: [PATCH 103/689] FIX: Correctly write .annot files with duplicate labels --- nibabel/freesurfer/io.py | 2 +- nibabel/freesurfer/tests/test_io.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index edce19c6cd..9dda179d1c 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -561,7 +561,7 @@ def write_string(s): write(-2) # maxstruc - write(np.max(labels) + 1) + write(max(np.max(labels) + 1, ctab.shape[0])) # File of LUT is unknown. write_string('NOFILE') diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 4f19f9e960..53d60acacd 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -359,7 +359,7 @@ def test_label(): def test_write_annot_maxstruct(): - """Verify we can write files with repeated labels - test by reading""" + """Test writing ANNOT files with repeated labels""" with InTemporaryDirectory(): nlabels = 3 names = ['label {}'.format(l) for l in range(1, nlabels + 1)] From 929dbc370bcd83ba599be60fdf53941a5bf3ad6f Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 24 May 2019 20:39:48 -0400 Subject: [PATCH 104/689] TEST: Add round-trip test --- nibabel/freesurfer/tests/test_io.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 53d60acacd..51fe123025 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -12,7 +12,7 @@ from nose.tools import assert_true import numpy as np -from numpy.testing import assert_equal, assert_raises, dec, assert_allclose +from numpy.testing import assert_equal, assert_raises, dec, assert_allclose, assert_array_equal from ..
import (read_geometry, read_morph_data, read_annot, read_label, write_geometry, write_morph_data, write_annot) @@ -369,4 +369,9 @@ def test_write_annot_maxstruct(): annot_path = 'c.annot' write_annot(annot_path, labels, rgba, names) - read_annot(annot_path) + # Validate the file can be read + rt_labels, rt_ctab, rt_names = read_annot(annot_path) + # Check round-trip + assert_array_equal(labels, rt_labels) + assert_array_equal(rgba, rt_ctab[:, :4]) + assert_equal(names, [n.decode('ascii') for n in rt_names]) From 84cafbc1042f284a841ec58df627b8c93d74baac Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 25 May 2019 15:10:59 -0400 Subject: [PATCH 105/689] DOC: Update long description to include CIFTI-2, BRIK/HEAD --- README.rst | 10 ++++++---- nibabel/info.py | 10 ++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/README.rst b/README.rst index fc3c3dd70f..1afdbc511a 100644 --- a/README.rst +++ b/README.rst @@ -18,14 +18,16 @@ Read / write access to some common neuroimaging file formats This package provides read +/- write access to some common medical and neuroimaging file formats, including: ANALYZE_ (plain, SPM99, SPM2 and later), -GIFTI_, NIfTI1_, NIfTI2_, MINC1_, MINC2_, MGH_ and ECAT_ as well as Philips -PAR/REC. We can read and write FreeSurfer_ geometry, annotation and -morphometry files. There is some very limited support for DICOM_. NiBabel is -the successor of PyNIfTI_. +GIFTI_, NIfTI1_, NIfTI2_, `CIFTI-2`_, MINC1_, MINC2_, `AFNI BRIK/HEAD`_, MGH_ and +ECAT_ as well as Philips PAR/REC. We can read and write FreeSurfer_ geometry, +annotation and morphometry files. There is some very limited support for +DICOM_. NiBabel is the successor of PyNIfTI_. .. _ANALYZE: http://www.grahamwideman.com/gw/brain/analyze/formatdoc.htm +.. _AFNI BRIK/HEAD: https://afni.nimh.nih.gov/pub/dist/src/README.attributes .. _NIfTI1: http://nifti.nimh.nih.gov/nifti-1/ .. _NIfTI2: http://nifti.nimh.nih.gov/nifti-2/ +.. _CIFTI-2: https://www.nitrc.org/projects/cifti/ .. _MINC1: https://en.wikibooks.org/wiki/MINC/Reference/MINC1_File_Format_Reference .. _MINC2: diff --git a/nibabel/info.py b/nibabel/info.py index 0be9a9c3c9..bccc4d03da 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -108,14 +108,16 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): This package provides read +/- write access to some common medical and neuroimaging file formats, including: ANALYZE_ (plain, SPM99, SPM2 and later), -GIFTI_, NIfTI1_, NIfTI2_, MINC1_, MINC2_, MGH_ and ECAT_ as well as Philips -PAR/REC. We can read and write FreeSurfer_ geometry, annotation and -morphometry files. There is some very limited support for DICOM_. NiBabel is -the successor of PyNIfTI_. +GIFTI_, NIfTI1_, NIfTI2_, `CIFTI-2`_, MINC1_, MINC2_, `AFNI BRIK/HEAD`_, MGH_ and +ECAT_ as well as Philips PAR/REC. We can read and write FreeSurfer_ geometry, +annotation and morphometry files. There is some very limited support for +DICOM_. NiBabel is the successor of PyNIfTI_. .. _ANALYZE: http://www.grahamwideman.com/gw/brain/analyze/formatdoc.htm +.. _AFNI BRIK/HEAD: https://afni.nimh.nih.gov/pub/dist/src/README.attributes .. _NIfTI1: http://nifti.nimh.nih.gov/nifti-1/ .. _NIfTI2: http://nifti.nimh.nih.gov/nifti-2/ +.. _CIFTI-2: https://www.nitrc.org/projects/cifti/ .. _MINC1: https://en.wikibooks.org/wiki/MINC/Reference/MINC1_File_Format_Reference .. _MINC2: From 5e40501059a4c0b3635d790aa9da9cba3f54d9f2 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 25 May 2019 17:19:56 -0400 Subject: [PATCH 106/689] DOC: Update changelog --- Changelog | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Changelog b/Changelog index a6dc71a7e7..e2f621e4cb 100644 --- a/Changelog +++ b/Changelog @@ -28,7 +28,8 @@ References like "pr/298" refer to github pull request numbers. 2.4.1 (Monday 27 May 2019) ============================ -Contributions from Egor Pafilov and Dave Allured. +Contributions from Egor Pafilov, Jath Palasubramaniam, Richard Nemec, and +Dave Allured. Enhancements ------------ @@ -41,6 +42,8 @@ Bug fixes reviewed by PM) * Safer warning registry manipulation when checking for overflows (pr/753) (CM, reviewed by MB) +* Correctly write .annot files with duplicate lables (pr/763) (Richard Nemec + with CM) Maintenance ----------- @@ -48,10 +51,12 @@ Maintenance CM) * Replace invalid MINC1 test file with fixed file (pr/754) (Dave Allured with CM) -* Update Sphinx config to support recent Sphinx/numpydoc (p4/749) (CM, +* Update Sphinx config to support recent Sphinx/numpydoc (pr/749) (CM, reviewed by PM) * Pacify ``FutureWarning`` and ``DeprecationWarning`` from h5py, numpy (pr/760) (CM) +* Accommodate Python 3.8 deprecation of collections.MutableMapping + (pr/762) (Jath Palasubramaniam, reviewed by CM) API changes and deprecations ---------------------------- From 79392b07acfcf45d030bead64dbfe0d7420c57bc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 25 May 2019 17:21:47 -0400 Subject: [PATCH 107/689] DOC: Update author list --- doc/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/index.rst b/doc/source/index.rst index 4c6201a780..ce1db32b86 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -97,6 +97,7 @@ contributed code and discussion (in rough order of appearance): * Konstantinos Raktivan * Matt Cieslak * Egor Pafilov +* Jath Palasubramaniam License reprise =============== From a3fd7cdfb52e9c28170ca5fc891a7127f005cb99 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 26 Apr 2019 07:39:33 -0400 Subject: [PATCH 108/689] DEP: Deprecate nibabel.trackvis, effect old FutureWarning threats --- nibabel/__init__.py | 2 +- nibabel/tests/test_trackvis.py | 40 +++++--------------- nibabel/trackvis.py | 68 ++++++++++++++-------------------- 3 files changed, 38 insertions(+), 72 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index fca22ccc99..d293b65482 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -76,7 +76,7 @@ def setup_test(): flip_axis, OrientationError, apply_orientation, aff2axcodes) from .imageclasses import class_map, ext_map, all_image_classes -from . import trackvis +trackvis = _ModuleProxy('nibabel.trackvis') from . import mriutils from . import streamlines from . 
import viewers diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 9f8d84946c..96f96a3f44 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -10,9 +10,9 @@ from ..orientations import aff2axcodes from ..volumeutils import native_code, swapped_code -from nose.tools import assert_true, assert_false, assert_equal, assert_raises -from numpy.testing import assert_array_equal, assert_array_almost_equal -from ..testing import error_warnings, suppress_warnings +from numpy.testing import assert_array_almost_equal +from ..testing import (assert_true, assert_false, assert_equal, assert_raises, assert_warns, + assert_array_equal, suppress_warnings) def test_write(): @@ -217,8 +217,7 @@ def _rt(streams, hdr, points_space): assert_raises(tv.HeaderError, tv.read, out_f, False, 'voxel') # There's a warning for any voxel sizes == 0 hdr = {'voxel_size': [2, 3, 0]} - with error_warnings(): - assert_raises(UserWarning, _rt, vx_streams, hdr, 'voxel') + assert_warns(UserWarning, _rt, vx_streams, hdr, 'voxel') # This should be OK hdr = {'voxel_size': [2, 3, 4]} (raw_streams, hdr), (proc_streams, _) = _rt(vx_streams, hdr, 'voxel') @@ -305,9 +304,8 @@ def test__check_hdr_points_space(): tv._check_hdr_points_space, hdr, 'voxel') # Warning here only hdr['voxel_size'] = [2, 3, 0] - with error_warnings(): - assert_raises(UserWarning, - tv._check_hdr_points_space, hdr, 'voxel') + assert_warns(UserWarning, + tv._check_hdr_points_space, hdr, 'voxel') # This is OK hdr['voxel_size'] = [2, 3, 4] assert_equal(tv._check_hdr_points_space(hdr, 'voxel'), None) @@ -370,10 +368,6 @@ def test_empty_header(): def test_get_affine(): # Test get affine behavior, including pending deprecation hdr = tv.empty_header() - # Using version 1 affine is not a good idea because is fragile and not - # very useful. 
The default atleast_v2=None mode raises a FutureWarning - with error_warnings(): - assert_raises(FutureWarning, tv.aff_from_hdr, hdr) # testing the old behavior old_afh = partial(tv.aff_from_hdr, atleast_v2=False) # default header gives useless affine @@ -421,9 +415,8 @@ def test_get_affine(): assert_equal(hdr['voxel_order'], o_codes) # Check it came back the way we wanted assert_array_equal(old_afh(hdr), in_aff) - # Check that the default case matches atleast_v2=False case - with suppress_warnings(): - assert_array_equal(tv.aff_from_hdr(hdr), flipped_aff) + # Check that v1 header raises error + assert_raises(tv.HeaderError, tv.aff_from_hdr, hdr) # now use the easier vox_to_ras field hdr = tv.empty_header() aff = np.eye(4) @@ -455,15 +448,7 @@ def test_aff_to_hdr(): # Historically we flip the first axis if there is a negative determinant assert_array_almost_equal(hdr['voxel_size'], [-1, 2, 3]) assert_array_almost_equal(tv.aff_from_hdr(hdr, atleast_v2=False), aff2) - # Test that default mode raises DeprecationWarning - with error_warnings(): - assert_raises(FutureWarning, tv.aff_to_hdr, affine, hdr) - assert_raises(FutureWarning, tv.aff_to_hdr, affine, hdr, None, None) - assert_raises(FutureWarning, tv.aff_to_hdr, affine, hdr, False, None) - assert_raises(FutureWarning, tv.aff_to_hdr, affine, hdr, None, False) - # And has same effect as above - with suppress_warnings(): - tv.aff_to_hdr(affine, hdr) + tv.aff_to_hdr(affine, hdr, pos_vox=False, set_order=False) assert_array_almost_equal(tv.aff_from_hdr(hdr, atleast_v2=False), affine) # Check pos_vox and order flags for hdr in ({}, {'version': 2}, {'version': 1}): @@ -515,13 +500,6 @@ def test_tv_class(): affine = np.diag([1, 2, 3, 1]) affine[:3, 3] = [10, 11, 12] # affine methods will raise same warnings and errors as function - with error_warnings(): - assert_raises(FutureWarning, tvf.set_affine, affine) - assert_raises(FutureWarning, tvf.set_affine, affine, None, None) - assert_raises(FutureWarning, tvf.set_affine, affine, False, None) - assert_raises(FutureWarning, tvf.set_affine, affine, None, False) - assert_raises(FutureWarning, tvf.get_affine) - assert_raises(FutureWarning, tvf.get_affine, None) tvf.set_affine(affine, pos_vox=True, set_order=True) aff = tvf.get_affine(atleast_v2=True) assert_array_almost_equal(aff, affine) diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index 7da4ffcbe1..233e10ba79 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -17,12 +17,18 @@ from .openers import ImageOpener from .orientations import aff2axcodes from .affines import apply_affine +from .deprecated import deprecate_with_version try: basestring except NameError: # python 3 basestring = str +warnings.warn("The trackvis interface has been deprecated and will be removed " + "in v4.0; please use the 'nibabel.streamlines' interface.", + DeprecationWarning, + stacklevel=2) + # Definition of trackvis header structure. 
# See http://www.trackvis.org/docs/?subsect=fileformat # See https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html @@ -99,6 +105,9 @@ class DataError(Exception): """ +@deprecate_with_version('trackvis.read is deprecated; please use ' + 'nibabel.streamlines.load, instead.', + since='2.5.0', until='4.0.0') def read(fileobj, as_generator=False, points_space=None, strict=True): ''' Read trackvis file from `fileobj`, return `streamlines`, `header` @@ -254,6 +263,9 @@ def track_gen(): return streamlines, hdr +@deprecate_with_version('trackvis.write is deprecated; please use ' + 'nibabel.streamlines.save, instead.', + since='2.5.0', until='4.0.0') def write(fileobj, streamlines, hdr_mapping=None, endianness=None, points_space=None): ''' Write header and `streamlines` to trackvis file `fileobj` @@ -536,6 +548,9 @@ def _hdr_from_mapping(hdr=None, mapping=None, endianness=native_code): return hdr +@deprecate_with_version('empty_header is deprecated; please use ' + 'nibabel.streamlines.TrkFile.create_empty_header, instead.', + since='2.5.0', until='4.0.0') def empty_header(endianness=None, version=2): ''' Empty trackvis header @@ -590,7 +605,10 @@ def empty_header(endianness=None, version=2): return hdr -def aff_from_hdr(trk_hdr, atleast_v2=None): +@deprecate_with_version('aff_from_hdr is deprecated; please use ' + 'nibabel.streamlines.trk.get_affine_trackvis_to_rasmm, instead.', + since='2.5.0', until='4.0.0') +def aff_from_hdr(trk_hdr, atleast_v2=True): ''' Return voxel to mm affine from trackvis header Affine is mapping from voxel space to Nifti (RAS) output coordinate @@ -625,12 +643,6 @@ def aff_from_hdr(trk_hdr, atleast_v2=None): origin field to 0. In future, we'll raise an error rather than try and estimate the affine from version 1 fields ''' - if atleast_v2 is None: - warnings.warn('Defaulting to `atleast_v2` of False. Future versions ' - 'will default to True', - FutureWarning, - stacklevel=2) - atleast_v2 = False if trk_hdr['version'] == 2: aff = trk_hdr['vox_to_ras'] if aff[3, 3] != 0: @@ -673,7 +685,10 @@ def aff_from_hdr(trk_hdr, atleast_v2=None): return aff -def aff_to_hdr(affine, trk_hdr, pos_vox=None, set_order=None): +@deprecate_with_version('aff_to_hdr is deprecated; please use the ' + 'nibabel.streamlines.TrkFile.affine_to_rasmm property, instead.', + since='2.5.0', until='4.0.0') +def aff_to_hdr(affine, trk_hdr, pos_vox=True, set_order=True): ''' Set affine `affine` into trackvis header `trk_hdr` Affine is mapping from voxel space to Nifti RAS) output coordinate @@ -715,18 +730,6 @@ def aff_to_hdr(affine, trk_hdr, pos_vox=None, set_order=None): application). The application also ignores the origin field, and may not use the 'image_orientation_patient' field. ''' - if pos_vox is None: - warnings.warn('Default for ``pos_vox`` will change to True in ' - 'future versions of nibabel', - FutureWarning, - stacklevel=2) - pos_vox = False - if set_order is None: - warnings.warn('Default for ``set_order`` will change to True in ' - 'future versions of nibabel', - FutureWarning, - stacklevel=2) - set_order = False try: version = trk_hdr['version'] except (KeyError, ValueError): # dict or structured array @@ -797,6 +800,9 @@ class TrackvisFile(object): relationship between voxels, rasmm and voxmm space (above). 
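Each decorator above stamps the wrapped callable with a versioned deprecation. nibabel's real ``deprecate_with_version`` also extends the docstring and can escalate to an error once the package version passes ``until``; the following is a minimal sketch of the core mechanism only, not the actual implementation:

.. code:: python

    import functools
    import warnings

    def deprecate_with_version_sketch(message, since='', until=''):
        # illustration only -- nibabel's version lives in nibabel/deprecated.py
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                warnings.warn('%s (deprecated since %s; scheduled for removal in %s)'
                              % (message, since, until),
                              DeprecationWarning, stacklevel=2)
                return func(*args, **kwargs)
            return wrapper
        return decorator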
''' + @deprecate_with_version('TrackvisFile is deprecated; please use ' + 'nibabel.streamlines.TrkFile, instead.', + since='2.5.0', until='4.0.0') def __init__(self, streamlines, mapping=None, @@ -836,7 +842,7 @@ def to_file(self, file_like): self.filename = (file_like if isinstance(file_like, basestring) else None) - def get_affine(self, atleast_v2=None): + def get_affine(self, atleast_v2=True): """ Get affine from header in object Returns @@ -853,15 +859,9 @@ def get_affine(self, atleast_v2=None): consider it unsafe for version 1 headers, and in future versions of nibabel we will raise an error for trackvis headers < version 2. """ - if atleast_v2 is None: - warnings.warn('Defaulting to `atleast_v2` of False. Future ' - 'versions will default to True', - FutureWarning, - stacklevel=2) - atleast_v2 = False return aff_from_hdr(self.header, atleast_v2) - def set_affine(self, affine, pos_vox=None, set_order=None): + def set_affine(self, affine, pos_vox=True, set_order=True): """ Set affine `affine` into trackvis header Affine is mapping from voxel space to Nifti RAS) output coordinate @@ -888,16 +888,4 @@ def set_affine(self, affine, pos_vox=None, set_order=None): ------- None """ - if pos_vox is None: - warnings.warn('Default for ``pos_vox`` will change to True in ' - 'future versions of nibabel', - FutureWarning, - stacklevel=2) - pos_vox = False - if set_order is None: - warnings.warn('Default for ``set_order`` will change to True in ' - 'future versions of nibabel', - FutureWarning, - stacklevel=2) - set_order = False return aff_to_hdr(affine, self.header, pos_vox, set_order) From 2663b682d252570a87b2a4c68217ff756c926740 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 26 Apr 2019 07:42:58 -0400 Subject: [PATCH 109/689] DEP: Transition checkwarns to deprecations and set target version --- nibabel/checkwarns.py | 21 ++++++++------------- nibabel/tests/test_checkwarns.py | 13 +++---------- 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/nibabel/checkwarns.py b/nibabel/checkwarns.py index 01ef8fd10c..deb3f6f009 100644 --- a/nibabel/checkwarns.py +++ b/nibabel/checkwarns.py @@ -13,25 +13,20 @@ import warnings from .testing import (error_warnings, suppress_warnings) +from .deprecated import deprecate_with_version warnings.warn('The checkwarns module is deprecated and will be removed ' - 'in nibabel v3.0', FutureWarning) + 'in nibabel v3.0', DeprecationWarning) +@deprecate_with_version('ErrorWarnings is deprecated; use nibabel.testing.error_warnings.', + since='2.1.0', until='3.0.0') class ErrorWarnings(error_warnings): - - def __init__(self, *args, **kwargs): - warnings.warn('ErrorWarnings is deprecated and will be removed in ' - 'nibabel v3.0; use nibabel.testing.error_warnings.', - FutureWarning) - super(ErrorWarnings, self).__init__(*args, **kwargs) + pass +@deprecate_with_version('IgnoreWarnings is deprecated; use nibabel.testing.suppress_warnings.', + since='2.1.0', until='3.0.0') class IgnoreWarnings(suppress_warnings): - - def __init__(self, *args, **kwargs): - warnings.warn('IgnoreWarnings is deprecated and will be removed in ' - 'nibabel v3.0; use nibabel.testing.suppress_warnings.', - FutureWarning) - super(IgnoreWarnings, self).__init__(*args, **kwargs) + pass diff --git a/nibabel/tests/test_checkwarns.py b/nibabel/tests/test_checkwarns.py index 11c7422326..094feca588 100644 --- a/nibabel/tests/test_checkwarns.py +++ b/nibabel/tests/test_checkwarns.py @@ -1,21 +1,14 @@ """ Tests for warnings context managers """ -from __future__ import division, 
print_function, absolute_import - -from nose.tools import assert_equal -from ..testing import clear_and_catch_warnings, suppress_warnings +from ..testing import assert_equal, assert_warns, suppress_warnings def test_ignore_and_error_warnings(): with suppress_warnings(): from .. import checkwarns - with clear_and_catch_warnings() as w: + with assert_warns(DeprecationWarning): checkwarns.IgnoreWarnings() - assert_equal(len(w), 1) - assert_equal(w[0].category, FutureWarning) - with clear_and_catch_warnings() as w: + with assert_warns(DeprecationWarning): checkwarns.ErrorWarnings() - assert_equal(len(w), 1) - assert_equal(w[0].category, FutureWarning) From 54c38f843aef008c750ade47c7f13bdbd35db7cf Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 26 Apr 2019 07:48:56 -0400 Subject: [PATCH 110/689] DEP: Transition minc module to deprecated, announce 3.0 removal --- nibabel/minc.py | 4 ++-- nibabel/minc1.py | 18 +++++++++--------- nibabel/testing/__init__.py | 2 +- nibabel/tests/test_minc.py | 8 ++++++++ nibabel/tests/test_minc1.py | 16 +++++++++------- 5 files changed, 29 insertions(+), 19 deletions(-) create mode 100644 nibabel/tests/test_minc.py diff --git a/nibabel/minc.py b/nibabel/minc.py index 94e8da57fc..09523bdc36 100644 --- a/nibabel/minc.py +++ b/nibabel/minc.py @@ -2,9 +2,9 @@ import warnings -warnings.warn("We will remove this module from nibabel soon; " +warnings.warn("We will remove this module from nibabel 3.0; " "Please use the 'minc1' module instead", - FutureWarning, + DeprecationWarning, stacklevel=2) from .minc1 import * # noqa diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 369922bf99..a8535eec05 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -18,7 +18,7 @@ from .fileslice import canonical_slicers from .keywordonly import kw_only_meth -from .deprecated import FutureWarningMixin +from .deprecated import deprecate_with_version _dt_dict = { ('b', 'unsigned'): np.uint8, @@ -331,13 +331,13 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): # Backwards compatibility -class MincFile(FutureWarningMixin, Minc1File): - """ Deprecated alternative name for Minc1File - """ - warn_message = 'MincFile is deprecated; please use Minc1File instead' +@deprecate_with_version('MincFile is deprecated; please use Minc1File instead', + since='2.0.0', until='3.0.0', warn_class=FutureWarning) +class MincFile(Minc1File): + pass -class MincImage(FutureWarningMixin, Minc1Image): - """ Deprecated alternative name for Minc1Image - """ - warn_message = 'MincImage is deprecated; please use Minc1Image instead' +@deprecate_with_version('MincImage is deprecated; please use Minc1Image instead', + since='2.0.0', until='3.0.0', warn_class=FutureWarning) +class MincImage(Minc1Image): + pass diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 2c0a93fe32..2c87e13a10 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -16,7 +16,7 @@ from os.path import dirname, abspath, join as pjoin import numpy as np -from numpy.testing import assert_array_equal +from numpy.testing import assert_array_equal, assert_warns from numpy.testing import dec skipif = dec.skipif slow = dec.slow diff --git a/nibabel/tests/test_minc.py b/nibabel/tests/test_minc.py new file mode 100644 index 0000000000..1d59fdedf3 --- /dev/null +++ b/nibabel/tests/test_minc.py @@ -0,0 +1,8 @@ +from ..info import cmp_pkg_version +from ..testing import assert_raises + + +def test_minc_removed(): + if cmp_pkg_version('3.0.0dev') < 1: + with 
assert_raises(ImportError): + import nibabel.minc diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index cb59d921eb..1c150b02d5 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -24,10 +24,9 @@ from .. import minc1 from ..minc1 import Minc1File, Minc1Image, MincHeader -from nose.tools import (assert_true, assert_equal, assert_false, assert_raises) -from numpy.testing import assert_array_equal from ..tmpdirs import InTemporaryDirectory -from ..testing import data_path +from ..testing import (assert_true, assert_equal, assert_false, assert_raises, assert_warns, + assert_array_equal, data_path, clear_and_catch_warnings) from . import test_spatialimages as tsi from .test_fileslice import slicer_samples @@ -106,7 +105,8 @@ def test_old_namespace(): # Check warnings raised arr = np.arange(24).reshape((2, 3, 4)) aff = np.diag([2, 3, 4, 1]) - with warnings.catch_warnings(record=True) as warns: + with clear_and_catch_warnings() as warns: + warnings.simplefilter('always', DeprecationWarning) # Top level import. # This import does not trigger an import of the minc.py module, because # it's the proxy object. @@ -122,7 +122,9 @@ def test_old_namespace(): # depending on whether the minc.py module is already imported in this # test run. if not previous_import: - assert_equal(warns.pop(0).category, FutureWarning) + assert_equal(warns.pop(0).category, DeprecationWarning) + + with clear_and_catch_warnings() as warns: from .. import Minc1Image, MincImage assert_equal(warns, []) # The import from old module is the same as that from new @@ -132,17 +134,17 @@ def test_old_namespace(): assert_equal(warns, []) # Create object using old name mimg = MincImage(arr, aff) - assert_array_equal(mimg.get_data(), arr) # Call to create object created warning assert_equal(warns.pop(0).category, FutureWarning) + assert_array_equal(mimg.get_data(), arr) # Another old name from ..minc1 import MincFile, Minc1File assert_false(MincFile is Minc1File) assert_equal(warns, []) mf = MincFile(netcdf_file(EG_FNAME)) - assert_equal(mf.get_data_shape(), (10, 20, 20)) # Call to create object created warning assert_equal(warns.pop(0).category, FutureWarning) + assert_equal(mf.get_data_shape(), (10, 20, 20)) class _TestMincFile(object): From 46a3228028400cb4e4608870a5cb8c137f25bab0 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 26 Apr 2019 08:02:26 -0400 Subject: [PATCH 111/689] DEP: Promote catch_warn_reset FutureWarning to DeprecationWarning --- nibabel/testing/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 2c87e13a10..16f2112299 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -21,6 +21,8 @@ skipif = dec.skipif slow = dec.slow +from ..deprecated import deprecate_with_version as _deprecate_with_version + # Allow failed import of nose if not now running tests try: from nose.tools import (assert_equal, assert_not_equal, @@ -187,12 +189,11 @@ class suppress_warnings(error_warnings): filter = 'ignore' +@_deprecate_with_version('catch_warn_reset is deprecated; use ' + 'nibabel.testing.clear_and_catch_warnings.', + since='2.1.0', until='3.0.0') class catch_warn_reset(clear_and_catch_warnings): - - def __init__(self, *args, **kwargs): - warnings.warn('catch_warn_reset is deprecated and will be removed in ' - 'nibabel v3.0; use nibabel.testing.clear_and_catch_warnings.', - FutureWarning) + pass EXTRA_SET = os.environ.get('NIPY_EXTRA_TESTS', '').split(',') From 8fd1bd1b02fd26c094679e1dd4b77871b532e7f9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 27 Apr 2019 22:21:49 -0400 Subject: [PATCH 112/689] TEST: Use old-style assert_warns --- nibabel/tests/test_checkwarns.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/nibabel/tests/test_checkwarns.py b/nibabel/tests/test_checkwarns.py index 094feca588..b1e6483273 100644 --- a/nibabel/tests/test_checkwarns.py +++ b/nibabel/tests/test_checkwarns.py @@ -7,8 +7,5 @@ def test_ignore_and_error_warnings(): with suppress_warnings(): from .. import checkwarns - with assert_warns(DeprecationWarning): - checkwarns.IgnoreWarnings() - - with assert_warns(DeprecationWarning): - checkwarns.ErrorWarnings() + assert_warns(DeprecationWarning, checkwarns.IgnoreWarnings) + assert_warns(DeprecationWarning, checkwarns.ErrorWarnings) From 50df61d3d071a3c0acc063aceaa99b8f5343694c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 29 Apr 2019 09:20:03 -0400 Subject: [PATCH 113/689] TEST: Create a test file for scheduling removals --- nibabel/tests/{test_minc.py => test_removalschedule.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename nibabel/tests/{test_minc.py => test_removalschedule.py} (100%) diff --git a/nibabel/tests/test_minc.py b/nibabel/tests/test_removalschedule.py similarity index 100% rename from nibabel/tests/test_minc.py rename to nibabel/tests/test_removalschedule.py From 48f3078cc4a02c4a945c407ea30c3c1d7bd318c8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 26 May 2019 09:44:25 -0400 Subject: [PATCH 114/689] MAINT: Set name to nibabel in setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 222ad8562a..0979c4daec 100755 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ messages = custom_pydicom_messages) def main(**extra_args): - setup(name=INFO.NAME, + setup(name='nibabel', maintainer=INFO.MAINTAINER, maintainer_email=INFO.MAINTAINER_EMAIL, description=INFO.DESCRIPTION, From 09a4f525a8b42e1249788bfcea28c9d2c7067224 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 27 May 2019 12:25:24 -0400 Subject: [PATCH 115/689] MNT: Bump version to 2.4.2dev --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index bccc4d03da..438ed33d6e 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -18,7 +18,7 @@ # (pre-release) version. _version_major = 2 _version_minor = 4 -_version_micro = 1 +_version_micro = 2 _version_extra = 'dev' # _version_extra = '' From 3c8ca398f94f7441fb9531edc87282dc5ce3bb9d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 17 Jun 2019 12:11:10 -0400 Subject: [PATCH 116/689] TEST: Generalize, schedule nibabel.checkwarns removal --- nibabel/tests/test_removalschedule.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 1d59fdedf3..d1d7e9ba1f 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,8 +1,15 @@ +from importlib import import_module from ..info import cmp_pkg_version from ..testing import assert_raises +SCHEDULE = [ + ('3.0.0', ('nibabel.minc', 'nibabel.checkwarns')), + ] -def test_minc_removed(): - if cmp_pkg_version('3.0.0dev') < 1: - with assert_raises(ImportError): - import nibabel.minc + +def test_removals(): + for version, to_remove in SCHEDULE: + if cmp_pkg_version(version) < 1: + for module in to_remove: + with assert_raises(ImportError, msg="Time to remove " + module): + import_module(module) From 4746a1caf38343f48c0465c48683770d1095c1de Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 17 Jun 2019 12:36:38 -0400 Subject: [PATCH 117/689] TEST: Verify ImportError, add object removal schedule, schedul trackvis removal --- nibabel/tests/test_removalschedule.py | 34 +++++++++++++++++++++------ 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index d1d7e9ba1f..6a6eead58f 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,15 +1,35 @@ -from importlib import import_module +import importlib from ..info import cmp_pkg_version -from ..testing import assert_raises +from ..testing import assert_raises, assert_false -SCHEDULE = [ - ('3.0.0', ('nibabel.minc', 'nibabel.checkwarns')), +MODULE_SCHEDULE = [ + ('4.0.0', ['nibabel.trackvis']), + ('3.0.0', ['nibabel.minc', 'nibabel.checkwarns']), + # Verify that the test will be quiet if the schedule outlives the modules + ('1.0.0', ['nibabel.neverexisted']), + ] + +OBJECT_SCHEDULE = [ + ('3.0.0', [('nibabel.testing', 'catch_warn_reset')]), + # Verify that the test will be quiet if the schedule outlives the modules + ('1.0.0', [('nibabel', 'neverexisted')]), ] -def test_removals(): - for version, to_remove in SCHEDULE: +def test_module_removal(): + for version, to_remove in MODULE_SCHEDULE: if cmp_pkg_version(version) < 1: for module in to_remove: with assert_raises(ImportError, msg="Time to remove " + module): - import_module(module) + importlib.__import__(module) + + +def test_object_removal(): + for version, to_remove in OBJECT_SCHEDULE: + if cmp_pkg_version(version) < 1: + for module_name, obj in to_remove: + try: + module = importlib.__import__(module_name) + except ImportError: + continue + assert_false(hasattr(module, obj), msg="Time to remove %s.%s" % (module_name, obj)) From b643ccd5f2cd94e7c4cba492b70cfc56cd1670b3 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 17 Jun 2019 14:38:01 -0400 Subject: [PATCH 118/689] PY2: No importlib --- nibabel/tests/test_removalschedule.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 6a6eead58f..24f9bdd12c 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,4 +1,3 @@ -import importlib from ..info import cmp_pkg_version from ..testing import assert_raises, assert_false @@ -21,7 +20,7 @@ def test_module_removal(): if cmp_pkg_version(version) < 1: for module in to_remove: with assert_raises(ImportError, msg="Time to remove " + module): - importlib.__import__(module) + __import__(module) def test_object_removal(): @@ -29,7 +28,7 @@ def test_object_removal(): if cmp_pkg_version(version) < 1: for module_name, obj in to_remove: try: - module = importlib.__import__(module_name) + module = __import__(module_name) except ImportError: continue assert_false(hasattr(module, obj), msg="Time to remove %s.%s" % (module_name, obj)) From 053c429413eca8c2e96e599e6eebe177dbe9e76c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 19 Jun 2019 10:33:28 -0400 Subject: [PATCH 119/689] CI: Fix AppVeyor codecov paths --- codecov.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 codecov.yml diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..0285fa4b06 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,2 @@ +fixes: + - "venv/Lib/site-packages/::" From 86b4fbedfb6919fab4e3ffe21536c5df2633c57b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 15 Jun 2019 14:28:04 +0200 Subject: [PATCH 120/689] FIX/TEST: Numpy casting rules have gotten more strict, raise different exception --- nibabel/tests/test_volumeutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 06df9eb6ad..fcdc5c2713 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -700,7 +700,7 @@ def test_a2f_non_numeric(): # Some versions of numpy can cast structured types to float, others not try: arr.astype(float) - except ValueError: + except (TypeError, ValueError): pass else: back_arr = write_return(arr, fobj, float) From bb64f19bd523e3943d5154b8ab30c81d940e1326 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 2 Jul 2018 15:19:40 -0400 Subject: [PATCH 121/689] ENH: Add FileBasedImage.serialize() --- nibabel/filebasedimages.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 7cc5b10648..2726e8afe2 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -511,3 +511,9 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): if sniff is None or len(sniff[0]) < klass._meta_sniff_len: return False, sniff return klass.header_class.may_contain_header(sniff[0]), sniff + + def serialize(self): + bio = io.BytesIO() + file_map = self.make_file_map({'image': bio, 'header': bio}) + self.to_file_map(file_map) + return bio.getvalue() From 75ed05ad2e20544050ec65179dfa0c1ab9f9fe30 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 17 Jul 2018 16:14:14 -0400 Subject: [PATCH 122/689] DOC: FileBasedImage.serialize.__doc__ --- nibabel/filebasedimages.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 2726e8afe2..1058bdbc24 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -513,6 +513,18 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): return klass.header_class.may_contain_header(sniff[0]), sniff def serialize(self): + """ Return a ``bytes`` object with the contents of the file that would + be written if the image were saved. + + Parameters + ---------- + None + + Returns + ------- + bytes + Serialized image + """ bio = io.BytesIO() file_map = self.make_file_map({'image': bio, 'header': bio}) self.to_file_map(file_map) From c278a94ffee8af45e2e333134b2547166dc092ef Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 17 Jul 2018 16:39:04 -0400 Subject: [PATCH 123/689] FIX: Import io --- nibabel/filebasedimages.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 1058bdbc24..470afe5204 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -8,6 +8,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Common interface for any image format--volume or surface, binary or xml.''' +import io from copy import deepcopy from six import string_types from .fileholders import FileHolder From ca60a1946a416789deda245a3029a69e5c20e023 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 17 Jul 2018 16:39:29 -0400 Subject: [PATCH 124/689] ENH: Test serialize via an API test mixin --- nibabel/tests/test_filebasedimages.py | 7 ++++--- nibabel/tests/test_image_api.py | 17 +++++++++++++++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index 9a6f8b3db7..0758ed0f5d 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -5,9 +5,9 @@ import numpy as np -from nibabel.filebasedimages import FileBasedHeader, FileBasedImage +from ..filebasedimages import FileBasedHeader, FileBasedImage -from nibabel.tests.test_image_api import GenericImageAPI +from .test_image_api import GenericImageAPI, SerializeMixin from nose.tools import (assert_true, assert_false, assert_equal, assert_not_equal) @@ -50,7 +50,8 @@ def set_data_dtype(self, dtype): self.arr = self.arr.astype(dtype) -class TestFBImageAPI(GenericImageAPI): +class TestFBImageAPI(GenericImageAPI, + SerializeMixin): """ Validation for FileBasedImage instances """ # A callable returning an image from ``image_maker(data, header)`` diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ad8ff1c7f6..a65ee0cc61 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -493,6 +493,19 @@ def validate_affine_deprecated(self, imaker, params): assert_true(aff is img.get_affine()) +class SerializeMixin(object): + + def validate_serialize(self, imaker, params): + img = imaker() + serialized = img.serialize() + with InTemporaryDirectory(): + fname = 'img' + self.standard_extension + img.to_filename(fname) + with open(fname, 'rb') as fobj: + file_contents = fobj.read() + assert serialized == file_contents + + class LoadImageAPI(GenericImageAPI, DataInterfaceMixin, AffineMixin, @@ -613,7 +626,7 @@ class 
TestNifti1PairAPI(TestSpm99AnalyzeAPI): can_save = True -class TestNifti1API(TestNifti1PairAPI): +class TestNifti1API(TestNifti1PairAPI, SerializeMixin): klass = image_maker = Nifti1Image standard_extension = '.nii' @@ -660,7 +673,7 @@ def loader(self, fname): # standard_extension = '.v' -class TestMGHAPI(ImageHeaderAPI): +class TestMGHAPI(ImageHeaderAPI, SerializeMixin): klass = image_maker = MGHImage example_shapes = ((2, 3, 4), (2, 3, 4, 5)) # MGH can only do >= 3D has_scaling = True From 38fd9fc1497b683d03a6dd7ce1aef0ba58f59c8e Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 7 Aug 2018 10:46:51 -0400 Subject: [PATCH 125/689] RF: Rename serialize to to_bytes, add from_bytes --- nibabel/filebasedimages.py | 51 ++++++++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 18 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 470afe5204..d6ca4d810d 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -260,6 +260,21 @@ def from_filename(klass, filename): file_map = klass.filespec_to_file_map(filename) return klass.from_file_map(file_map) + @classmethod + def from_bytes(klass, bstring): + """ Construct image from a byte string + + Class method + + Parameters + ---------- + bstring : bytes + Byte string containing the on-disk representation of an image + """ + bio = io.BytesIO(bstring) + file_map = self.make_file_map({'image': bio, 'header': bio}) + return klass.from_file_map(file_map) + @classmethod def from_file_map(klass, file_map): raise NotImplementedError @@ -334,6 +349,24 @@ def to_filename(self, filename): self.file_map = self.filespec_to_file_map(filename) self.to_file_map() + def to_bytes(self): + """ Return a ``bytes`` object with the contents of the file that would + be written if the image were saved. + + Parameters + ---------- + None + + Returns + ------- + bytes + Serialized image + """ + bio = io.BytesIO() + file_map = self.make_file_map({'image': bio, 'header': bio}) + self.to_file_map(file_map) + return bio.getvalue() + @deprecate_with_version('to_filespec method is deprecated.\n' 'Please use the "to_filename" method instead.', '1.0', '3.0') @@ -512,21 +545,3 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): if sniff is None or len(sniff[0]) < klass._meta_sniff_len: return False, sniff return klass.header_class.may_contain_header(sniff[0]), sniff - - def serialize(self): - """ Return a ``bytes`` object with the contents of the file that would - be written if the image were saved. - - Parameters - ---------- - None - - Returns - ------- - bytes - Serialized image - """ - bio = io.BytesIO() - file_map = self.make_file_map({'image': bio, 'header': bio}) - self.to_file_map(file_map) - return bio.getvalue() From b729779c90db30d9ef1cec185f92a3d56628912f Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 10 Aug 2018 22:33:07 -0400 Subject: [PATCH 126/689] RF: Factor SerializableImage --- nibabel/filebasedimages.py | 109 ++++++++++++++++++++++---------- nibabel/freesurfer/mghformat.py | 3 +- nibabel/nifti1.py | 3 +- nibabel/tests/test_image_api.py | 25 +++++++- 4 files changed, 102 insertions(+), 38 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index d6ca4d810d..05bfa06295 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -260,21 +260,6 @@ def from_filename(klass, filename): file_map = klass.filespec_to_file_map(filename) return klass.from_file_map(file_map) - @classmethod - def from_bytes(klass, bstring): - """ Construct image from a byte string - - Class method - - Parameters - ---------- - bstring : bytes - Byte string containing the on-disk representation of an image - """ - bio = io.BytesIO(bstring) - file_map = self.make_file_map({'image': bio, 'header': bio}) - return klass.from_file_map(file_map) - @classmethod def from_file_map(klass, file_map): raise NotImplementedError @@ -349,24 +334,6 @@ def to_filename(self, filename): self.file_map = self.filespec_to_file_map(filename) self.to_file_map() - def to_bytes(self): - """ Return a ``bytes`` object with the contents of the file that would - be written if the image were saved. - - Parameters - ---------- - None - - Returns - ------- - bytes - Serialized image - """ - bio = io.BytesIO() - file_map = self.make_file_map({'image': bio, 'header': bio}) - self.to_file_map(file_map) - return bio.getvalue() - @deprecate_with_version('to_filespec method is deprecated.\n' 'Please use the "to_filename" method instead.', '1.0', '3.0') @@ -545,3 +512,79 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): if sniff is None or len(sniff[0]) < klass._meta_sniff_len: return False, sniff return klass.header_class.may_contain_header(sniff[0]), sniff + + +class SerializableImage(FileBasedImage): + ''' + Abstract image class for (de)serializing images to/from byte strings. + + The class doesn't define any image properties. + + It has: + + methods: + + * .to_bytes() - serialize image to byte string + + classmethods: + + * from_bytes(bytestring) - make instance by deserializing a byte string + + The following properties should hold: + + * ``klass.from_bytes(bstr).to_bytes() == bstr`` + * if ``img = orig.__class__.from_bytes(orig.to_bytes())``, then + ``img.header == orig.header`` and ``img.get_data() == orig.get_data()`` + + Further, for images that are single files on disk, the following methods of loading + the image must be equivalent: + + img = klass.from_filename(fname) + + with open(fname, 'rb') as fobj: + img = klass.from_bytes(fobj.read()) + + And the following methods of saving a file must be equivalent: + + img.to_filename(fname) + + with open(fname, 'wb') as fobj: + fobj.write(img.to_bytes()) + + Images that consist of separate header and data files will generally + place the header with the data, but if the header is not of fixed + size and does not define its own size, a new format may need to be + defined. 
+ ''' + @classmethod + def from_bytes(klass, bytestring): + """ Construct image from a byte string + + Class method + + Parameters + ---------- + bstring : bytes + Byte string containing the on-disk representation of an image + """ + bio = io.BytesIO(bstring) + file_map = klass.make_file_map({'image': bio, 'header': bio}) + return klass.from_file_map(file_map) + + def to_bytes(self): + """ Return a ``bytes`` object with the contents of the file that would + be written if the image were saved. + + Parameters + ---------- + None + + Returns + ------- + bytes + Serialized image + """ + bio = io.BytesIO() + file_map = self.make_file_map({'image': bio, 'header': bio}) + self.to_file_map(file_map) + return bio.getvalue() diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 8ff783a865..37bc82cfb3 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -16,6 +16,7 @@ from ..affines import voxel_sizes, from_matvec from ..volumeutils import (array_to_file, array_from_file, endian_codes, Recoder) +from ..filebasedimages import SerializableImage from ..spatialimages import HeaderDataError, SpatialImage from ..fileholders import FileHolder from ..arrayproxy import ArrayProxy, reshape_dataobj @@ -503,7 +504,7 @@ def __setitem__(self, item, value): super(MGHHeader, self).__setitem__(item, value) -class MGHImage(SpatialImage): +class MGHImage(SpatialImage, SerializableImage): """ Class for MGH format image """ header_class = MGHHeader diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index e844936aaf..e12cb6543a 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -19,6 +19,7 @@ import numpy.linalg as npl from .py3k import asstr +from .filebasedimages import SerializableImage from .volumeutils import Recoder, make_dt_codes, endian_codes from .spatialimages import HeaderDataError, ImageFileError from .batteryrunners import Report @@ -1758,7 +1759,7 @@ class Nifti1PairHeader(Nifti1Header): is_single = False -class Nifti1Pair(analyze.AnalyzeImage): +class Nifti1Pair(analyze.AnalyzeImage, SerializableImage): """ Class for NIfTI1 format image, header pair """ header_class = Nifti1PairHeader diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index a65ee0cc61..e3105b682c 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -494,10 +494,9 @@ def validate_affine_deprecated(self, imaker, params): class SerializeMixin(object): - - def validate_serialize(self, imaker, params): + def validate_to_bytes(self, imaker, params): img = imaker() - serialized = img.serialize() + serialized = img.to_bytes() with InTemporaryDirectory(): fname = 'img' + self.standard_extension img.to_filename(fname) @@ -505,6 +504,26 @@ def validate_serialize(self, imaker, params): file_contents = fobj.read() assert serialized == file_contents + def validate_from_bytes(self, imaker, params): + for img_params in self.example_images: + img_a = self.klass.from_filename(img_params['fname']) + with open(img_params['fname'], 'rb') as fobj: + img_b = self.klass.from_bytes(fobj.read()) + + assert img_a.header == img_b.header + assert np.array_equal(img_a.get_data(), img_b.get_data()) + + def validate_round_trip(self, imaker, params): + for img_params in self.example_images: + img_a = self.klass.from_filename(img_params['fname']) + bytes_a = img_a.to_bytes() + + img_b = self.klass.from_bytes(bytes_a) + + assert img_b.to_bytes() == bytes_a + assert img_a.header == img_b.header + assert np.array_equal(img_a.get_data(), 
img_b.get_data()) + class LoadImageAPI(GenericImageAPI, DataInterfaceMixin, From c943e9d8b5a3399ca7957d3e65b4b254c25c06b3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 13 Aug 2018 22:08:07 -0400 Subject: [PATCH 127/689] TEST: Generate file when no examples available --- nibabel/filebasedimages.py | 2 +- nibabel/tests/test_filebasedimages.py | 13 +++++++-- nibabel/tests/test_image_api.py | 40 +++++++++++++++++---------- 3 files changed, 37 insertions(+), 18 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 05bfa06295..bfbe50349b 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -567,7 +567,7 @@ def from_bytes(klass, bytestring): bstring : bytes Byte string containing the on-disk representation of an image """ - bio = io.BytesIO(bstring) + bio = io.BytesIO(bytestring) file_map = klass.make_file_map({'image': bio, 'header': bio}) return klass.from_file_map(file_map) diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index 0758ed0f5d..0bab751e29 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -5,7 +5,7 @@ import numpy as np -from ..filebasedimages import FileBasedHeader, FileBasedImage +from ..filebasedimages import FileBasedHeader, FileBasedImage, SerializableImage from .test_image_api import GenericImageAPI, SerializeMixin @@ -50,8 +50,11 @@ def set_data_dtype(self, dtype): self.arr = self.arr.astype(dtype) -class TestFBImageAPI(GenericImageAPI, - SerializeMixin): +class SerializableNumpyImage(FBNumpyImage, SerializableImage): + pass + + +class TestFBImageAPI(GenericImageAPI): """ Validation for FileBasedImage instances """ # A callable returning an image from ``image_maker(data, header)`` @@ -81,6 +84,10 @@ def obj_params(self): yield func, params +class TestSerializableImageAPI(TestFBImageAPI, SerializeMixin): + image_maker = SerializableNumpyImage + + def test_filebased_header(): # Test stuff about the default FileBasedHeader diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index e3105b682c..3a1b4f4e87 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -505,24 +505,36 @@ def validate_to_bytes(self, imaker, params): assert serialized == file_contents def validate_from_bytes(self, imaker, params): - for img_params in self.example_images: - img_a = self.klass.from_filename(img_params['fname']) - with open(img_params['fname'], 'rb') as fobj: - img_b = self.klass.from_bytes(fobj.read()) + img = imaker() + with InTemporaryDirectory(): + fname = 'img' + self.standard_extension + img.to_filename(fname) - assert img_a.header == img_b.header - assert np.array_equal(img_a.get_data(), img_b.get_data()) + all_images = list(getattr(self, 'example_images', [])) + [{'fname': fname}] + for img_params in all_images: + img_a = self.klass.from_filename(img_params['fname']) + with open(img_params['fname'], 'rb') as fobj: + img_b = self.klass.from_bytes(fobj.read()) - def validate_round_trip(self, imaker, params): - for img_params in self.example_images: - img_a = self.klass.from_filename(img_params['fname']) - bytes_a = img_a.to_bytes() + assert img_a.header == img_b.header + assert np.array_equal(img_a.get_data(), img_b.get_data()) + + def validate_to_from_bytes(self, imaker, params): + img = imaker() + with InTemporaryDirectory(): + fname = 'img' + self.standard_extension + img.to_filename(fname) + + all_images = list(getattr(self, 'example_images', [])) + [{'fname': 
fname}] + for img_params in all_images: + img_a = self.klass.from_filename(img_params['fname']) + bytes_a = img_a.to_bytes() - img_b = self.klass.from_bytes(bytes_a) + img_b = self.klass.from_bytes(bytes_a) - assert img_b.to_bytes() == bytes_a - assert img_a.header == img_b.header - assert np.array_equal(img_a.get_data(), img_b.get_data()) + assert img_b.to_bytes() == bytes_a + assert img_a.header == img_b.header + assert np.array_equal(img_a.get_data(), img_b.get_data()) class LoadImageAPI(GenericImageAPI, From 96bc5a0214ca4ffa251d3ca12e365b61d8540abe Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 13 Aug 2018 22:29:31 -0400 Subject: [PATCH 128/689] TEST: klass sometimes missing, equality sometimes undefined --- nibabel/tests/test_image_api.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 3a1b4f4e87..e64b8c9663 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -506,36 +506,55 @@ def validate_to_bytes(self, imaker, params): def validate_from_bytes(self, imaker, params): img = imaker() + klass = getattr(self, 'klass', img.__class__) with InTemporaryDirectory(): fname = 'img' + self.standard_extension img.to_filename(fname) all_images = list(getattr(self, 'example_images', [])) + [{'fname': fname}] for img_params in all_images: - img_a = self.klass.from_filename(img_params['fname']) + img_a = klass.from_filename(img_params['fname']) with open(img_params['fname'], 'rb') as fobj: - img_b = self.klass.from_bytes(fobj.read()) + img_b = klass.from_bytes(fobj.read()) - assert img_a.header == img_b.header + assert self._header_eq(img_a.header, img_b.header) assert np.array_equal(img_a.get_data(), img_b.get_data()) def validate_to_from_bytes(self, imaker, params): img = imaker() + klass = getattr(self, 'klass', img.__class__) with InTemporaryDirectory(): fname = 'img' + self.standard_extension img.to_filename(fname) all_images = list(getattr(self, 'example_images', [])) + [{'fname': fname}] for img_params in all_images: - img_a = self.klass.from_filename(img_params['fname']) + img_a = klass.from_filename(img_params['fname']) bytes_a = img_a.to_bytes() - img_b = self.klass.from_bytes(bytes_a) + img_b = klass.from_bytes(bytes_a) assert img_b.to_bytes() == bytes_a - assert img_a.header == img_b.header + assert self._header_eq(img_a.header, img_b.header) assert np.array_equal(img_a.get_data(), img_b.get_data()) + @staticmethod + def _header_eq(header_a, header_b): + """ Quick-and-dirty header equality check + + Abstract classes may have undefined equality, in which case test for + same type + """ + not_implemented = False + header_eq = True + try: + header_eq = header_a == header_b + except NotImplementedError: + header_eq = type(header_a) == type(header_b) + + return header_eq + + class LoadImageAPI(GenericImageAPI, DataInterfaceMixin, From 08a7bab4fd6a2de4a923e2e7af2d08d3bc797c2c Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 13 Aug 2018 22:46:38 -0400 Subject: [PATCH 129/689] ENH: Add to/from_bytes interface to GiftiImage --- nibabel/__init__.py | 1 + nibabel/gifti/gifti.py | 7 +++++-- nibabel/tests/test_image_api.py | 7 +++++++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index fca22ccc99..d8c877d206 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -65,6 +65,7 @@ def setup_test(): from .minc1 import Minc1Image from .minc2 import Minc2Image from .cifti2 import Cifti2Header, Cifti2Image +from .gifti import GiftiImage # Deprecated backwards compatiblity for MINC1 from .deprecated import ModuleProxy as _ModuleProxy minc = _ModuleProxy('nibabel.minc') diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 997ba78523..22d6449e9a 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -18,7 +18,7 @@ import numpy as np from .. import xmlutils as xml -from ..filebasedimages import FileBasedImage +from ..filebasedimages import SerializableImage from ..nifti1 import data_type_codes, xform_codes, intent_codes from .util import (array_index_order_codes, gifti_encoding_codes, gifti_endian_codes, KIND2FMT) @@ -534,7 +534,7 @@ def metadata(self): return self.meta.metadata -class GiftiImage(xml.XmlSerializable, FileBasedImage): +class GiftiImage(xml.XmlSerializable, SerializableImage): """ GIFTI image object The Gifti spec suggests using the following suffixes to your @@ -724,6 +724,9 @@ def to_xml(self, enc='utf-8'): """ + xml.XmlSerializable.to_xml(self, enc) + # Avoid the indirection of going through to_file_map + to_bytes = to_xml + def to_file_map(self, file_map=None): """ Save the current image to the specified file_map diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index e64b8c9663..01d406b4e6 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -38,6 +38,7 @@ from .. import (AnalyzeImage, Spm99AnalyzeImage, Spm2AnalyzeImage, Nifti1Pair, Nifti1Image, Nifti2Pair, Nifti2Image, + GiftiImage, MGHImage, Minc1Image, Minc2Image, is_proxy) from ..spatialimages import SpatialImage from .. import minc1, minc2, parrec, brikhead @@ -731,6 +732,12 @@ class TestMGHAPI(ImageHeaderAPI, SerializeMixin): standard_extension = '.mgh' +class TestGiftiAPI(LoadImageAPI, SerializeMixin): + klass = image_maker = GiftiImage + can_save = True + standard_extension = '.gii' + + class TestAFNIAPI(LoadImageAPI): loader = brikhead.load klass = image_maker = brikhead.AFNIImage From 372ac68bc4c09259cb46d584e63c321a91a05982 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 14 Aug 2018 09:32:01 -0400 Subject: [PATCH 130/689] TEST: Delete images so mmapped files can be removed --- nibabel/tests/test_image_api.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 01d406b4e6..ac24c4415f 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -520,6 +520,8 @@ def validate_from_bytes(self, imaker, params): assert self._header_eq(img_a.header, img_b.header) assert np.array_equal(img_a.get_data(), img_b.get_data()) + del img_a + del img_b def validate_to_from_bytes(self, imaker, params): img = imaker() @@ -538,6 +540,8 @@ def validate_to_from_bytes(self, imaker, params): assert img_b.to_bytes() == bytes_a assert self._header_eq(img_a.header, img_b.header) assert np.array_equal(img_a.get_data(), img_b.get_data()) + del img_a + del img_b @staticmethod def _header_eq(header_a, header_b): From e197b72c05150aa8ec35af91e6d498270187b67b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 26 Sep 2018 12:08:59 -0400 Subject: [PATCH 131/689] DOC: Improve docstring --- nibabel/filebasedimages.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index bfbe50349b..6940e83eb4 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -79,8 +79,8 @@ class FileBasedImage(object): methods: - * .get_header() (deprecated, use header property instead) - * .to_filename(fname) - writes data to filename(s) derived from + * get_header() (deprecated, use header property instead) + * to_filename(fname) - writes data to filename(s) derived from ``fname``, where the derivation may differ between formats. * to_file_map() - save image to files with which the image is already associated. @@ -524,21 +524,27 @@ class SerializableImage(FileBasedImage): methods: - * .to_bytes() - serialize image to byte string + * to_bytes() - serialize image to byte string classmethods: * from_bytes(bytestring) - make instance by deserializing a byte string - The following properties should hold: + Loading from byte strings should provide round-trip equivalence: - * ``klass.from_bytes(bstr).to_bytes() == bstr`` - * if ``img = orig.__class__.from_bytes(orig.to_bytes())``, then - ``img.header == orig.header`` and ``img.get_data() == orig.get_data()`` + .. code:: python + + img_a = klass.from_bytes(bstr) + img_b = klass.from_bytes(img_a.to_bytes()) + + np.allclose(img_a.get_fdata(), img_b.get_fdata()) + np.allclose(img_a.affine, img_b.affine) Further, for images that are single files on disk, the following methods of loading the image must be equivalent: + .. code:: python + img = klass.from_filename(fname) with open(fname, 'rb') as fobj: @@ -546,15 +552,15 @@ class SerializableImage(FileBasedImage): And the following methods of saving a file must be equivalent: + .. code:: python + img.to_filename(fname) with open(fname, 'wb') as fobj: fobj.write(img.to_bytes()) - Images that consist of separate header and data files will generally - place the header with the data, but if the header is not of fixed - size and does not define its own size, a new format may need to be - defined. + Images that consist of separate header and data files (e.g., Analyze + images) currently do not support this interface. ''' @classmethod def from_bytes(klass, bytestring): From eae0342626468084e38b7308110e24059aeff54d Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 20 Jun 2019 07:58:58 -0400 Subject: [PATCH 132/689] FIX: Nifti1Image is serializable, not Nifti1Pair --- nibabel/nifti1.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index e12cb6543a..a050195234 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -1759,7 +1759,7 @@ class Nifti1PairHeader(Nifti1Header): is_single = False -class Nifti1Pair(analyze.AnalyzeImage, SerializableImage): +class Nifti1Pair(analyze.AnalyzeImage): """ Class for NIfTI1 format image, header pair """ header_class = Nifti1PairHeader @@ -2026,7 +2026,7 @@ def as_reoriented(self, ornt): return img -class Nifti1Image(Nifti1Pair): +class Nifti1Image(Nifti1Pair, SerializableImage): """ Class for single file NIfTI1 format image """ header_class = Nifti1Header From 30bea56bf5dbb412a5e7b48085d3aef388cf2d3f Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 20 Jun 2019 08:01:01 -0400 Subject: [PATCH 133/689] ENH: Check for multi-file images, to ensure well-defined behavior --- nibabel/filebasedimages.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 6940e83eb4..b9898cc496 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -573,6 +573,8 @@ def from_bytes(klass, bytestring): bstring : bytes Byte string containing the on-disk representation of an image """ + if len(klass.files_types) > 1: + raise NotImplementedError("from_bytes is undefined for multi-file images") bio = io.BytesIO(bytestring) file_map = klass.make_file_map({'image': bio, 'header': bio}) return klass.from_file_map(file_map) @@ -590,6 +592,8 @@ def to_bytes(self): bytes Serialized image """ + if len(self.__class__.files_types) > 1: + raise NotImplementedError("to_bytes() is undefined for multi-file images") bio = io.BytesIO() file_map = self.make_file_map({'image': bio, 'header': bio}) self.to_file_map(file_map) From 562fac8a4b7357c232b3e39ad3b4a2c7556b8e32 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 20 Jun 2019 08:21:59 -0400 Subject: [PATCH 134/689] TEST: Move special case header equality check into TestSerializableImageAPI --- nibabel/tests/test_filebasedimages.py | 6 ++++++ nibabel/tests/test_image_api.py | 16 +++++----------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index 0bab751e29..c9d256edbb 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -87,6 +87,12 @@ def obj_params(self): class TestSerializableImageAPI(TestFBImageAPI, SerializeMixin): image_maker = SerializableNumpyImage + @staticmethod + def _header_eq(header_a, header_b): + """ FileBasedHeader is an abstract class, so __eq__ is undefined. + Checking for the same header type is sufficient, here. 
""" + return type(header_a) == type(header_b) == FileBasedHeader + def test_filebased_header(): # Test stuff about the default FileBasedHeader diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ac24c4415f..ac2a2428c4 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -545,19 +545,13 @@ def validate_to_from_bytes(self, imaker, params): @staticmethod def _header_eq(header_a, header_b): - """ Quick-and-dirty header equality check + """ Header equality check that can be overridden by a subclass of this test - Abstract classes may have undefined equality, in which case test for - same type + This allows us to retain the same tests above when testing an image that uses an + abstract class as a header, namely when testing the FileBasedImage API, which + raises a NotImplementedError for __eq__ """ - not_implemented = False - header_eq = True - try: - header_eq = header_a == header_b - except NotImplementedError: - header_eq = type(header_a) == type(header_b) - - return header_eq + return header_a == header_b From f829919bf682cfd0d0cd5fccad92c29ed4b3fd07 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 20 Jun 2019 10:44:17 -0400 Subject: [PATCH 135/689] STY/DOC: Note about multi-file images, newline --- nibabel/filebasedimages.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index b9898cc496..c17701bc2e 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -561,7 +561,10 @@ class SerializableImage(FileBasedImage): Images that consist of separate header and data files (e.g., Analyze images) currently do not support this interface. + For multi-file images, ``to_bytes()`` and ``from_bytes()`` must be + overridden, and any encoding details should be documented. ''' + @classmethod def from_bytes(klass, bytestring): """ Construct image from a byte string From a89c536613c7038b650201a2ee34db8656657160 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 25 May 2019 14:04:25 -0400 Subject: [PATCH 136/689] MAINT: Full-speed setuptools --- doc/source/conf.py | 9 +++- nibabel/info.py | 41 ---------------- setup.cfg | 79 ++++++++++++++++++++++++++++++ setup.py | 106 ++-------------------------------------- tools/refresh_readme.py | 2 +- tox.ini | 5 -- 6 files changed, 90 insertions(+), 152 deletions(-) create mode 100644 setup.cfg diff --git a/doc/source/conf.py b/doc/source/conf.py index d6e14e1a70..206efe0a75 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -21,6 +21,7 @@ import sys import os +from configparser import ConfigParser # Check for external Sphinx extensions we depend on try: @@ -53,7 +54,11 @@ # Write long description from info with open('_long_description.inc', 'wt') as fobj: - fobj.write(rel['LONG_DESCRIPTION']) + fobj.write(rel['long_description']) + +# Load metadata from setup.cfg +config = ConfigParser() +config.read(os.path.join('..', '..', 'setup.cfg')) # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. @@ -87,7 +92,7 @@ # General information about the project. 
project = u'NiBabel' -copyright = u'2006-2019, %(MAINTAINER)s <%(AUTHOR_EMAIL)s>' % rel +copyright = u'2006-2019, %(maintainer)s <%(author_email)s>' % config['metadata'] # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/nibabel/info.py b/nibabel/info.py index 45d4147c13..909f0eadb0 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -84,16 +84,6 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): else _cmp(extra, pkg_extra)) -CLASSIFIERS = ["Development Status :: 4 - Beta", - "Environment :: Console", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Topic :: Scientific/Engineering"] - -description = 'Access a multitude of neuroimaging data formats' - # Note: this long_description is the canonical place to edit this text. # It also appears in README.rst, but it should get there by running # ``tools/refresh_readme.py`` which pulls in this version. @@ -183,34 +173,3 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): .. _zenodo: https://zenodo.org .. _Digital Object Identifier: https://en.wikipedia.org/wiki/Digital_object_identifier """ - -# versions for dependencies. Check these against: -# doc/source/installation.rst -# requirements.txt -# .travis.yml -NUMPY_MIN_VERSION = '1.8' -PYDICOM_MIN_VERSION = '0.9.9' -SIX_MIN_VERSION = '1.3' - -# Main setup parameters -NAME = 'nibabel' -MAINTAINER = "Chris Markiewicz" -MAINTAINER_EMAIL = "neuroimaging@python.org" -DESCRIPTION = description -LONG_DESCRIPTION = long_description -URL = "http://nipy.org/nibabel" -DOWNLOAD_URL = "https://github.com/nipy/nibabel" -LICENSE = "MIT license" -CLASSIFIERS = CLASSIFIERS -AUTHOR = "nibabel developers" -AUTHOR_EMAIL = "neuroimaging@python.org" -PLATFORMS = "OS Independent" -MAJOR = _version_major -MINOR = _version_minor -MICRO = _version_micro -ISRELEASE = _version_extra == '' -VERSION = __version__ -PROVIDES = ["nibabel", 'nisext'] -REQUIRES = ["numpy>=%s" % NUMPY_MIN_VERSION, - "six>=%s" % SIX_MIN_VERSION, - 'bz2file; python_version < "3.0"'] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000..0bcda1324e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,79 @@ +[metadata] +name = nibabel +version = attr: nibabel.__version__ +url = https://nipy.org/nibabel +download_url = https://github.com/nipy/nibabel +author = nibabel developers +author_email = neuroimaging@python.org +maintainer = Chris Markiewicz +maintainer_email = neuroimaging@python.org +classifiers = + Development Status :: 4 - Beta + Environment :: Console + Intended Audience :: Science/Research + License :: OSI Approved :: MIT License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3.4 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Topic :: Scientific/Engineering +license = MIT License +description = Access a multitude of neuroimaging data formats +long_description = file:README.rst +long_description_content_type = text/x-rst; charset=UTF-8 +platforms = OS Independent +provides = + nibabel + nisext + +[options] +python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +install_requires = + numpy >=1.8 + six >=1.3 + bz2file ; python_version < "3.0" +tests_require = nose +test_suite = nose.collector +zip_safe = 
False +packages = find: +include_package_data = True + +[options.extras_require] +dicom = + dicom >=0.9.9 +doc = + sphinx >=0.3 +test = + nose >=0.10.1 +all = + %(dicom)s + %(doc)s + %(test)s + +[options.entry_points] +console_scripts = + nib-ls=nibabel.cmdline.ls:main + nib-dicomfs=nibabel.cmdline.dicomfs:main + nib-diff=nibabel.cmdline.diff:main + nib-nifti-dx=nibabel.cmdline.nifti_dx:main + nib-tck2trk=nibabel.cmdline.tck2trk:main + nib-trk2tck=nibabel.cmdline.trk2tck:main + parrec2nii=nibabel.cmdline.parrec2nii:main + +[options.package_data] +nibabel = + tests/data/* + */tests/data/* + +[flake8] +max-line-length = 100 +ignore = D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D208,D209,D210,D300,D301,D400,D401,D403,E24,E121,E123,E126,E226,E266,E402,E704,E731,F821,I100,I101,I201,N802,N803,N804,N806,W503,W504,W605 +exclude = + *test* + *sphinx* + nibabel/externals/* + */__init__.py + diff --git a/setup.py b/setup.py index 0979c4daec..a69a0e9f17 100755 --- a/setup.py +++ b/setup.py @@ -10,9 +10,6 @@ """Build helper.""" import os -from os.path import join as pjoin -import sys -from functools import partial # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly # update it when the contents of directories change. @@ -21,105 +18,8 @@ from setuptools import setup -# Commit hash writing, and dependency checking -from nisext.sexts import (get_comrec_build, package_check, install_scripts_bat, - read_vars_from) -cmdclass = {'build_py': get_comrec_build('nibabel'), - 'install_scripts': install_scripts_bat} - -# Get project related strings. -INFO = read_vars_from(pjoin('nibabel', 'info.py')) - -# Prepare setuptools args -if 'setuptools' in sys.modules: - extra_setuptools_args = dict( - tests_require=['nose'], - test_suite='nose.collector', - zip_safe=False, - extras_require=dict( - doc='Sphinx>=0.3', - test='nose>=0.10.1'), - ) - pkg_chk = partial(package_check, setuptools_args = extra_setuptools_args) -else: - extra_setuptools_args = {} - pkg_chk = package_check - -# Do dependency checking -pkg_chk('numpy', INFO.NUMPY_MIN_VERSION) -pkg_chk('six', INFO.SIX_MIN_VERSION) -custom_pydicom_messages = {'missing opt': 'Missing optional package "%s"' - ' provided by package "pydicom"' -} -pkg_chk('dicom', - INFO.PYDICOM_MIN_VERSION, - optional='dicom', - messages = custom_pydicom_messages) - -def main(**extra_args): - setup(name='nibabel', - maintainer=INFO.MAINTAINER, - maintainer_email=INFO.MAINTAINER_EMAIL, - description=INFO.DESCRIPTION, - long_description=INFO.LONG_DESCRIPTION, - url=INFO.URL, - download_url=INFO.DOWNLOAD_URL, - license=INFO.LICENSE, - classifiers=INFO.CLASSIFIERS, - author=INFO.AUTHOR, - author_email=INFO.AUTHOR_EMAIL, - platforms=INFO.PLATFORMS, - version=INFO.VERSION, - provides=INFO.PROVIDES, - install_requires=INFO.REQUIRES, - packages = ['nibabel', - 'nibabel.externals', - 'nibabel.externals.tests', - 'nibabel.gifti', - 'nibabel.gifti.tests', - 'nibabel.cifti2', - 'nibabel.cifti2.tests', - 'nibabel.cmdline', - 'nibabel.cmdline.tests', - 'nibabel.nicom', - 'nibabel.freesurfer', - 'nibabel.freesurfer.tests', - 'nibabel.nicom.tests', - 'nibabel.testing', - 'nibabel.tests', - 'nibabel.benchmarks', - 'nibabel.streamlines', - 'nibabel.streamlines.tests', - # install nisext as its own package - 'nisext', - 'nisext.tests'], - # The package_data spec has no effect for me (on python 2.6) -- even - # changing to data_files doesn't get this stuff included in the source - # distribution -- not sure if it has something to do with the magic - # above, but distutils is 
surely the worst piece of code in all of - # python -- duplicating things into MANIFEST.in but this is admittedly - # only a workaround to get things started -- not a solution - package_data = {'nibabel': - [pjoin('tests', 'data', '*'), - pjoin('externals', 'tests', 'data', '*'), - pjoin('nicom', 'tests', 'data', '*'), - pjoin('gifti', 'tests', 'data', '*'), - pjoin('streamlines', 'tests', 'data', '*'), - ]}, - scripts = [pjoin('bin', 'parrec2nii'), - pjoin('bin', 'nib-ls'), - pjoin('bin', 'nib-dicomfs'), - pjoin('bin', 'nib-nifti-dx'), - pjoin('bin', 'nib-tck2trk'), - pjoin('bin', 'nib-trk2tck'), - pjoin('bin', 'nib-diff'), - ], - cmdclass = cmdclass, - **extra_args - ) - +# Commit hash writing +from nisext.sexts import get_comrec_build if __name__ == "__main__": - # Do not use nisext's dynamically updated install_requires - extra_setuptools_args.pop('install_requires', None) - main(**extra_setuptools_args) + setup(cmdclass={'build_py': get_comrec_build('nibabel')}) diff --git a/tools/refresh_readme.py b/tools/refresh_readme.py index b64ee6e8c1..59076442c7 100755 --- a/tools/refresh_readme.py +++ b/tools/refresh_readme.py @@ -19,7 +19,7 @@ rel = runpy.run_path(os.path.join('nibabel', 'info.py')) -readme = ''.join(readme_lines) + '\n' + rel['LONG_DESCRIPTION'] +readme = ''.join(readme_lines) + '\n' + rel['long_description'] with open('README.rst', 'wt') as fobj: fobj.write(readme) diff --git a/tox.ini b/tox.ini index 5585639795..a0002e12b6 100644 --- a/tox.ini +++ b/tox.ini @@ -18,8 +18,3 @@ deps = deps = [testenv:np-1.2.1] deps = -[flake8] -max-line-length=100 -ignore=D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D208,D209,D210,D300,D301,D400,D401,D403,E24,E121,E123,E126,E226,E266,E402,E704,E731,F821,I100,I101,I201,N802,N803,N804,N806,W503,W504,W605 -exclude=*test*,*sphinx*,nibabel/externals/*,*/__init__.py - From 80b6174d32a703071b69c99dc2b41a0d1b01cabe Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 24 Jun 2019 10:30:21 -0400 Subject: [PATCH 137/689] MAINT: Restore reading version from info.py --- nibabel/info.py | 2 ++ setup.cfg | 1 - setup.py | 8 ++++++-- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index 909f0eadb0..4fa1b44b1c 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -173,3 +173,5 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): .. _zenodo: https://zenodo.org .. _Digital Object Identifier: https://en.wikipedia.org/wiki/Digital_object_identifier """ + +VERSION = __version__ diff --git a/setup.cfg b/setup.cfg index 0bcda1324e..69bd84afe7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,5 @@ [metadata] name = nibabel -version = attr: nibabel.__version__ url = https://nipy.org/nibabel download_url = https://github.com/nipy/nibabel author = nibabel developers diff --git a/setup.py b/setup.py index a69a0e9f17..18c052d041 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,11 @@ from setuptools import setup # Commit hash writing -from nisext.sexts import get_comrec_build +from nisext.sexts import get_comrec_build, read_vars_from + +INFO = read_vars_from(os.path.join('nibabel', 'info.py')) if __name__ == "__main__": - setup(cmdclass={'build_py': get_comrec_build('nibabel')}) + setup(name='nibabel', + version=INFO.VERSION, + cmdclass={'build_py': get_comrec_build('nibabel')}) From 230a178cacfa6f0fba56e2a80e7f5ed3703510b0 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 24 Jun 2019 11:07:37 -0400 Subject: [PATCH 138/689] PY2: ConfigParser for Sphinx build --- doc/source/conf.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 206efe0a75..cdc773e35a 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -21,7 +21,10 @@ import sys import os -from configparser import ConfigParser +try: + from configparser import ConfigParser +except ImportError: + from ConfigParser import ConfigParser # PY2 # Check for external Sphinx extensions we depend on try: @@ -59,6 +62,10 @@ # Load metadata from setup.cfg config = ConfigParser() config.read(os.path.join('..', '..', 'setup.cfg')) +try: + metadata = config['metadata'] +except AttributeError: + metadata = dict(config.items('metadata')) # PY2 # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. @@ -92,7 +99,7 @@ # General information about the project. project = u'NiBabel' -copyright = u'2006-2019, %(maintainer)s <%(author_email)s>' % config['metadata'] +copyright = u'2006-2019, %(maintainer)s <%(author_email)s>' % metadata # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From 157d316c106ea8509b2866b456545ac30a580c07 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 24 Jun 2019 11:24:11 -0400 Subject: [PATCH 139/689] WIN: Drop .bat suffix for scripts --- nibabel/tests/scriptrunner.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index a82fdaa1e8..c5b37df80f 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -129,9 +129,6 @@ def run_command(self, cmd, check_code=True): # the script through the Python interpreter cmd = [sys.executable, pjoin(self.local_script_dir, cmd[0])] + cmd[1:] - elif os.name == 'nt': - # Need .bat file extension for windows - cmd[0] += '.bat' if os.name == 'nt': # Quote any arguments with spaces. The quotes delimit the arguments # on Windows, and the arguments might be file paths with spaces. From 3221e53574ddf367f6f98d6f68492fbd75054a2c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 24 Jun 2019 11:25:33 -0400 Subject: [PATCH 140/689] MAINT: externals/tests/data skipped in sdist --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 11bf20b7c2..ec2f348805 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -6,6 +6,7 @@ recursive-include tools * # put this stuff back into setup.py (package_data) once I'm enlightened # enough to accomplish this herculean task recursive-include nibabel/tests/data * +recursive-include nibabel/externals/tests/data * recursive-include nibabel/nicom/tests/data * recursive-include nibabel/gifti/tests/data * include nibabel/COMMIT_INFO.txt From 754ab3353de6b7270ffe20d273c987d7c1e3d5de Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 24 Jun 2019 17:39:04 -0400 Subject: [PATCH 141/689] MAINT: Add minimum setuptools version to setup.py --- setup.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/setup.py b/setup.py index 18c052d041..5e0468dd84 100755 --- a/setup.py +++ b/setup.py @@ -11,12 +11,7 @@ import os -# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly -# update it when the contents of directories change. 
-if os.path.exists('MANIFEST'): - os.remove('MANIFEST') - -from setuptools import setup +import setuptools # Commit hash writing from nisext.sexts import get_comrec_build, read_vars_from @@ -24,6 +19,7 @@ INFO = read_vars_from(os.path.join('nibabel', 'info.py')) if __name__ == "__main__": - setup(name='nibabel', - version=INFO.VERSION, - cmdclass={'build_py': get_comrec_build('nibabel')}) + setuptools.setup(name='nibabel', + version=INFO.VERSION, + setup_requires=['setuptools>=30.3.0'], + cmdclass={'build_py': get_comrec_build('nibabel')}) From 2345bb4901e625e952253c8c8eabf8f3f4198acc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 24 Jun 2019 17:42:22 -0400 Subject: [PATCH 142/689] CI: Test minimum setuptools --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index fe2cdf0b97..e6111e8790 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,7 +33,7 @@ matrix: # Absolute minimum dependencies - python: 2.7 env: - - DEPENDS="numpy==1.8" + - DEPENDS="numpy==1.8 setuptools==30.3.0" # Absolute minimum dependencies - python: 2.7 env: From c03213822e3a022a430a2d19621a81d0531189aa Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 28 Jun 2019 19:24:40 -0400 Subject: [PATCH 143/689] MAINT: Add pyproject.toml to give setuptools hint --- MANIFEST.in | 2 +- pyproject.toml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 pyproject.toml diff --git a/MANIFEST.in b/MANIFEST.in index ec2f348805..439af883cd 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include AUTHOR COPYING Makefile* MANIFEST.in setup* README.* +include AUTHOR COPYING Makefile* MANIFEST.in setup* README.* pyproject.toml include Changelog TODO requirements.txt recursive-include doc * recursive-include bin * diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..3e61cc01f4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +# Setuptools version should match setup.py; wheel because pip will insert it noisily +requires = ["setuptools >= 30.3.0", "wheel"] From 0282bb957f990100aaf39d19bd3e9c7606b4fe6d Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 28 Jun 2019 19:25:09 -0400 Subject: [PATCH 144/689] MAINT: Add wheel requirement on-the-fly, improve comments --- setup.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 5e0468dd84..3fb24126a8 100755 --- a/setup.py +++ b/setup.py @@ -9,17 +9,27 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Build helper.""" +import sys import os -import setuptools +from setuptools import setup -# Commit hash writing +# nisext is nipy setup extensions, which we're mostly moving away from +# get_comrec_build stores the current commit in COMMIT_HASH.txt at build time +# read_vars_from evaluates a python file and makes variables available from nisext.sexts import get_comrec_build, read_vars_from INFO = read_vars_from(os.path.join('nibabel', 'info.py')) +# Give setuptools a hint to complain if it's too old a version +# 30.3.0 allows us to put most metadata in setup.cfg +# Should match pyproject.toml +SETUP_REQUIRES = ['setuptools >= 30.3.0'] +# This enables setuptools to install wheel on-the-fly +SETUP_REQUIRES += ['wheel'] if 'bdist_wheel' in sys.argv else [] + if __name__ == "__main__": - setuptools.setup(name='nibabel', - version=INFO.VERSION, - setup_requires=['setuptools>=30.3.0'], - cmdclass={'build_py': get_comrec_build('nibabel')}) + setup(name='nibabel', + version=INFO.VERSION, + setup_requires=SETUP_REQUIRES, + cmdclass={'build_py': get_comrec_build('nibabel')}) From fe0898f079018626a6932efee8c1816b833d7bf7 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 4 Jul 2019 13:45:34 +0100 Subject: [PATCH 145/689] ENH: raise error if CIFTI-2 header file has different shape as data --- nibabel/cifti2/cifti2.py | 39 +++++++++++++++++-- nibabel/cifti2/tests/test_cifti2.py | 8 +++- nibabel/cifti2/tests/test_new_cifti2.py | 51 ++++++++++++++++++------- 3 files changed, 80 insertions(+), 18 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 0f8108b235..5f9fc6dac4 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1209,6 +1209,36 @@ def _to_xml_element(self): mat.append(mim._to_xml_element()) return mat + def get_axis(self, index): + ''' + Generates the Cifti2 axis for a given dimension + + Parameters + ---------- + index : int + Dimension for which we want to obtain the mapping. + + Returns + ------- + axis : :class:`.cifti2_axes.Axis` + ''' + from . import cifti2_axes + return cifti2_axes.from_index_mapping(self.get_index_map(index)) + + def get_data_shape(self): + """ + Returns data shape expected based on the CIFTI-2 header + """ + from . import cifti2_axes + if len(self.mapped_indices) == 0: + return () + base_shape = [-1 for _ in range(max(self.mapped_indices) + 1)] + for mim in self: + size = len(cifti2_axes.from_index_mapping(mim)) + for idx in mim.applies_to_matrix_dimension: + base_shape[idx] = size + return tuple(base_shape) + class Cifti2Header(FileBasedHeader, xml.XmlSerializable): ''' Class for CIFTI-2 header extension ''' @@ -1279,8 +1309,7 @@ def get_axis(self, index): ------- axis : :class:`.cifti2_axes.Axis` ''' - from . 
import cifti2_axes - return cifti2_axes.from_index_mapping(self.matrix.get_index_map(index)) + return self.matrix.get_axis(index) @classmethod def from_axes(cls, axes): @@ -1426,6 +1455,10 @@ def to_file_map(self, file_map=None): header = self._nifti_header extension = Cifti2Extension(content=self.header.to_xml()) header.extensions.append(extension) + if header.get_data_shape() != self.header.matrix.get_data_shape(): + raise ValueError("Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( + self._dataobj.shape, self.header.matrix.get_data_shape() + )) # if intent code is not set, default to unknown CIFTI if header.get_intent()[0] == 'none': header.set_intent('NIFTI_INTENT_CONNECTIVITY_UNKNOWN') @@ -1438,7 +1471,7 @@ def to_file_map(self, file_map=None): img.to_file_map(file_map or self.file_map) def update_headers(self): - ''' Harmonize CIFTI-2 and NIfTI headers with image data + ''' Harmonize NIfTI headers with image data >>> import numpy as np >>> data = np.zeros((2,3,4)) diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index 6054c126b0..8a8ffd6fc2 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -7,7 +7,7 @@ from nibabel import cifti2 as ci from nibabel.nifti2 import Nifti2Header -from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError +from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError, Cifti2NamedMap, Cifti2MatrixIndicesMap from nose.tools import assert_true, assert_equal, assert_raises, assert_is_none @@ -358,4 +358,10 @@ class TestCifti2ImageAPI(_TDA): standard_extension = '.nii' def make_imaker(self, arr, header=None, ni_header=None): + for idx, sz in enumerate(arr.shape): + maps = [Cifti2NamedMap(str(value)) for value in range(sz)] + mim = ci.Cifti2MatrixIndicesMap( + (idx, ), 'CIFTI_INDEX_TYPE_SCALARS', maps=maps + ) + header.matrix.append(mim) return lambda: self.image_maker(arr.copy(), header, ni_header) diff --git a/nibabel/cifti2/tests/test_new_cifti2.py b/nibabel/cifti2/tests/test_new_cifti2.py index 01bc742a22..79e79ba2d2 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -12,11 +12,12 @@ from nibabel import cifti2 as ci from nibabel.tmpdirs import InTemporaryDirectory -from nose.tools import assert_true, assert_equal +from nose.tools import assert_true, assert_equal, assert_raises affine = [[-1.5, 0, 0, 90], [0, 1.5, 0, -85], - [0, 0, 1.5, -71]] + [0, 0, 1.5, -71], + [0, 0, 0, 1.]] dimensions = (120, 83, 78) @@ -234,7 +235,7 @@ def test_dtseries(): matrix.append(series_map) matrix.append(geometry_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(13, 9) + data = np.random.randn(13, 10) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE_SERIES') @@ -257,7 +258,7 @@ def test_dscalar(): matrix.append(scalar_map) matrix.append(geometry_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 9) + data = np.random.randn(2, 10) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE_SCALARS') @@ -279,7 +280,7 @@ def test_dlabel(): matrix.append(label_map) matrix.append(geometry_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 9) + data = np.random.randn(2, 10) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE_LABELS') @@ -299,7 +300,7 @@ def test_dconn(): matrix = ci.Cifti2Matrix() matrix.append(mapping) hdr = 
ci.Cifti2Header(matrix) - data = np.random.randn(9, 9) + data = np.random.randn(10, 10) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE') @@ -322,7 +323,7 @@ def test_ptseries(): matrix.append(series_map) matrix.append(parcel_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(13, 3) + data = np.random.randn(13, 4) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_SERIES') @@ -344,7 +345,7 @@ def test_pscalar(): matrix.append(scalar_map) matrix.append(parcel_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 3) + data = np.random.randn(2, 4) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_SCALAR') @@ -366,7 +367,7 @@ def test_pdconn(): matrix.append(geometry_map) matrix.append(parcel_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 3) + data = np.random.randn(10, 4) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_DENSE') @@ -388,7 +389,7 @@ def test_dpconn(): matrix.append(parcel_map) matrix.append(geometry_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 3) + data = np.random.randn(4, 10) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE_PARCELLATED') @@ -410,7 +411,7 @@ def test_plabel(): matrix.append(label_map) matrix.append(parcel_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(2, 3) + data = np.random.randn(2, 4) img = ci.Cifti2Image(data, hdr) with InTemporaryDirectory(): @@ -429,7 +430,7 @@ def test_pconn(): matrix = ci.Cifti2Matrix() matrix.append(mapping) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(3, 3) + data = np.random.randn(4, 4) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED') @@ -453,7 +454,7 @@ def test_pconnseries(): matrix.append(parcel_map) matrix.append(series_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(3, 3, 13) + data = np.random.randn(4, 4, 13) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_' 'PARCELLATED_SERIES') @@ -479,7 +480,7 @@ def test_pconnscalar(): matrix.append(parcel_map) matrix.append(scalar_map) hdr = ci.Cifti2Header(matrix) - data = np.random.randn(3, 3, 13) + data = np.random.randn(4, 4, 2) img = ci.Cifti2Image(data, hdr) img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_' 'PARCELLATED_SCALAR') @@ -496,3 +497,25 @@ def test_pconnscalar(): check_parcel_map(img2.header.matrix.get_index_map(0)) check_scalar_map(img2.header.matrix.get_index_map(2)) del img2 + + +def test_wrong_shape(): + scalar_map = create_scalar_map((0, )) + brain_model_map = create_geometry_map((1, )) + + matrix = ci.Cifti2Matrix() + matrix.append(scalar_map) + matrix.append(brain_model_map) + hdr = ci.Cifti2Header(matrix) + + # correct shape is (2, 10) + for data in ( + np.random.randn(1, 11), + np.random.randn(2, 10, 1), + np.random.randn(1, 2, 10), + np.random.randn(3, 10), + np.random.randn(2, 9), + ): + img = ci.Cifti2Image(data, hdr) + assert_raises(ValueError, img.to_file_map) + From 61e6b6a9c4a1a791528416da251591cd6a164c7b Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 4 Jul 2019 13:45:44 +0100 Subject: [PATCH 146/689] correct typo --- nibabel/batteryrunners.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/batteryrunners.py b/nibabel/batteryrunners.py index be3977111a..b77c8b8858 100644 --- 
a/nibabel/batteryrunners.py +++ b/nibabel/batteryrunners.py @@ -141,7 +141,7 @@ def check_only(self, obj): ------- reports : sequence sequence of report objects reporting on result of running - checks (withou fixes) on `obj` + checks (without fixes) on `obj` ''' reports = [] for check in self._checks: From aba96da7f2b9e903934faa1e2b134805debd8ef2 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 4 Jul 2019 13:53:38 +0100 Subject: [PATCH 147/689] ENH: raise warning if creating Cifti2Image with incorrect shape --- nibabel/cifti2/cifti2.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 5f9fc6dac4..a26d120f87 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -30,6 +30,7 @@ from ..nifti2 import Nifti2Image, Nifti2Header from ..arrayproxy import reshape_dataobj from ..keywordonly import kw_only_meth +from warnings import warn def _float_01(val): @@ -1374,12 +1375,19 @@ def __init__(self, super(Cifti2Image, self).__init__(dataobj, header=header, extra=extra, file_map=file_map) self._nifti_header = Nifti2Header.from_header(nifti_header) + # if NIfTI header not specified, get data type from input array if nifti_header is None: if hasattr(dataobj, 'dtype'): self._nifti_header.set_data_dtype(dataobj.dtype) self.update_headers() + if self._nifti_header.get_data_shape() != self.header.matrix.get_data_shape(): + warn("Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( + self._dataobj.shape, self.header.matrix.get_data_shape() + )) + + @property def nifti_header(self): return self._nifti_header From 5888a12a969a76fadb6994ef1929aae39c47e326 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 4 Jul 2019 13:56:28 +0100 Subject: [PATCH 148/689] STYLE: removed extra empty line --- nibabel/cifti2/cifti2.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index a26d120f87..52d3a92415 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1387,7 +1387,6 @@ def __init__(self, self._dataobj.shape, self.header.matrix.get_data_shape() )) - @property def nifti_header(self): return self._nifti_header From a1bfa76c838450254183239fabe5ae280eea4668 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Thu, 4 Jul 2019 14:07:12 +0100 Subject: [PATCH 149/689] TEST: add tests to check that warnings are raised --- nibabel/cifti2/tests/test_cifti2.py | 4 ++-- nibabel/cifti2/tests/test_new_cifti2.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index 8a8ffd6fc2..8f85b62041 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -7,7 +7,7 @@ from nibabel import cifti2 as ci from nibabel.nifti2 import Nifti2Header -from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError, Cifti2NamedMap, Cifti2MatrixIndicesMap +from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError from nose.tools import assert_true, assert_equal, assert_raises, assert_is_none @@ -359,7 +359,7 @@ class TestCifti2ImageAPI(_TDA): def make_imaker(self, arr, header=None, ni_header=None): for idx, sz in enumerate(arr.shape): - maps = [Cifti2NamedMap(str(value)) for value in range(sz)] + maps = [ci.Cifti2NamedMap(str(value)) for value in range(sz)] mim = ci.Cifti2MatrixIndicesMap( (idx, ), 'CIFTI_INDEX_TYPE_SCALARS', maps=maps ) diff --git a/nibabel/cifti2/tests/test_new_cifti2.py 
b/nibabel/cifti2/tests/test_new_cifti2.py index 79e79ba2d2..2a157ca7fb 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -13,6 +13,7 @@ from nibabel.tmpdirs import InTemporaryDirectory from nose.tools import assert_true, assert_equal, assert_raises +from nibabel.testing import clear_and_catch_warnings, error_warnings, suppress_warnings affine = [[-1.5, 0, 0, 90], [0, 1.5, 0, -85], @@ -516,6 +517,10 @@ def test_wrong_shape(): np.random.randn(3, 10), np.random.randn(2, 9), ): - img = ci.Cifti2Image(data, hdr) + with clear_and_catch_warnings(): + with error_warnings(): + assert_raises(UserWarning, ci.Cifti2Image, data, hdr) + with suppress_warnings(): + img = ci.Cifti2Image(data, hdr) assert_raises(ValueError, img.to_file_map) From 907231367c1812fc57a798e119f3303f6226ee91 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 5 Jul 2019 11:46:46 +0100 Subject: [PATCH 150/689] Fixed style errors --- nibabel/cifti2/cifti2.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 52d3a92415..d2448c8939 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1463,9 +1463,10 @@ def to_file_map(self, file_map=None): extension = Cifti2Extension(content=self.header.to_xml()) header.extensions.append(extension) if header.get_data_shape() != self.header.matrix.get_data_shape(): - raise ValueError("Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( - self._dataobj.shape, self.header.matrix.get_data_shape() - )) + raise ValueError( + "Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( + self._dataobj.shape, self.header.matrix.get_data_shape() + )) # if intent code is not set, default to unknown CIFTI if header.get_intent()[0] == 'none': header.set_intent('NIFTI_INTENT_CONNECTIVITY_UNKNOWN') From 875f93c4e5a6b1cedb34cebfadcd76dcaa1c99bf Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 12 Jul 2019 09:26:48 -0700 Subject: [PATCH 151/689] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nibabel/cifti2/cifti2.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index d2448c8939..c68208cfee 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1233,7 +1233,7 @@ def get_data_shape(self): from . 
import cifti2_axes if len(self.mapped_indices) == 0: return () - base_shape = [-1 for _ in range(max(self.mapped_indices) + 1)] + base_shape = [-1] * (max(self.mapped_indices) + 1) for mim in self: size = len(cifti2_axes.from_index_mapping(mim)) for idx in mim.applies_to_matrix_dimension: @@ -1382,7 +1382,7 @@ def __init__(self, self._nifti_header.set_data_dtype(dataobj.dtype) self.update_headers() - if self._nifti_header.get_data_shape() != self.header.matrix.get_data_shape(): + if self._dataobj.shape != self.header.matrix.get_data_shape(): warn("Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( self._dataobj.shape, self.header.matrix.get_data_shape() )) @@ -1462,7 +1462,7 @@ def to_file_map(self, file_map=None): header = self._nifti_header extension = Cifti2Extension(content=self.header.to_xml()) header.extensions.append(extension) - if header.get_data_shape() != self.header.matrix.get_data_shape(): + if self._dataobj.shape != self.header.matrix.get_data_shape(): raise ValueError( "Dataobj shape {} does not match shape expected from CIFTI-2 header {}".format( self._dataobj.shape, self.header.matrix.get_data_shape() From d0a484b15be8bf1b398eb9fcc1b6241205fa66d3 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 12 Jul 2019 09:34:30 -0700 Subject: [PATCH 152/689] Set undefined dimensions to size None --- nibabel/cifti2/cifti2.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index c68208cfee..fdd52b871f 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1229,11 +1229,13 @@ def get_axis(self, index): def get_data_shape(self): """ Returns data shape expected based on the CIFTI-2 header + + Any dimensions omitted in the CIFIT-2 header will be given a default size of None. """ from . import cifti2_axes if len(self.mapped_indices) == 0: return () - base_shape = [-1] * (max(self.mapped_indices) + 1) + base_shape = [None] * (max(self.mapped_indices) + 1) for mim in self: size = len(cifti2_axes.from_index_mapping(mim)) for idx in mim.applies_to_matrix_dimension: From c531421827f81b627fcafa5184d0d5ced2549744 Mon Sep 17 00:00:00 2001 From: Michiel Cottaar Date: Fri, 12 Jul 2019 09:47:01 -0700 Subject: [PATCH 153/689] Update nibabel/cifti2/cifti2.py Co-Authored-By: Chris Markiewicz --- nibabel/cifti2/cifti2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index fdd52b871f..104c9396cd 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -1230,7 +1230,7 @@ def get_data_shape(self): """ Returns data shape expected based on the CIFTI-2 header - Any dimensions omitted in the CIFIT-2 header will be given a default size of None. + Any dimensions omitted in the CIFTI-2 header will be given a default size of None. """ from . import cifti2_axes if len(self.mapped_indices) == 0: From 1a0d949aae7d140d1769bf8ab8d1de8d0eef86bd Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 27 Jul 2019 11:55:07 -0400 Subject: [PATCH 154/689] TEST: Add multiframe ECAT file for testing --- .gitmodules | 2 +- nibabel-data/nipy-ecattest | 2 +- nibabel/tests/test_ecat_data.py | 14 +++++++++++++- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 7ff8d61885..db0afa268e 100644 --- a/.gitmodules +++ b/.gitmodules @@ -6,7 +6,7 @@ url = git://github.com/matthew-brett/nitest-minc2.git [submodule "nipy-ecattest"] path = nibabel-data/nipy-ecattest - url = https://github.com/freec84/nipy-ecattest + url = https://github.com/effigies/nipy-ecattest [submodule "nibabel-data/nitest-freesurfer"] path = nibabel-data/nitest-freesurfer url = https://bitbucket.org/nipy/nitest-freesurfer.git diff --git a/nibabel-data/nipy-ecattest b/nibabel-data/nipy-ecattest index 12c9ee6d18..9a0a592057 160000 --- a/nibabel-data/nipy-ecattest +++ b/nibabel-data/nipy-ecattest @@ -1 +1 @@ -Subproject commit 12c9ee6d18d50235e3453897a4be60c19bf126c0 +Subproject commit 9a0a592057bc16894c20c77b03ea1ebb5f8ca8f9 diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index f0c9d70b3e..dce96646e8 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -37,7 +37,7 @@ class TestNegatives(object): # unit: 1/cm ) - @needs_nibabel_data('nitest-minc2') + @needs_nibabel_data('nipy-ecattest') def test_load(self): # Check highest level load of minc works img = self.opener(self.example_params['fname']) @@ -50,3 +50,15 @@ def test_load(self): assert_almost_equal(data.min(), self.example_params['min'], 4) assert_almost_equal(data.max(), self.example_params['max'], 4) assert_almost_equal(data.mean(), self.example_params['mean'], 4) + + +class TestMultiframe(TestNegatives): + example_params = dict( + fname=os.path.join(ECAT_TEST_PATH, 'ECAT7_testcase_multiframe.v'), + shape=(256, 256, 207, 3), + type=np.int16, + # Zeroed out image + min=0.0, + max=29170.67905, + mean=121.454, + ) From 244bc37a89f8c924656c21e22c1991513128c0a1 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 28 Jul 2019 08:12:03 -0400 Subject: [PATCH 155/689] FIX: Minor ECAT cleanups --- nibabel/ecat.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index e8b881bd66..ca9c20328a 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -561,7 +561,7 @@ def _check_affines(self): i = iter(affs) first = i.next() for item in i: - if not np.all(first == item): + if not np.allclose(first, item): return False return True @@ -760,7 +760,7 @@ def __init__(self, dataobj, affine, header, Parameters ---------- - dataabj : array-like + dataobj : array-like image data affine : None or (4,4) array-like homogeneous affine giving relationship between voxel coords and @@ -811,6 +811,7 @@ def __init__(self, dataobj, affine, header, file_map = self.__class__.make_file_map() self.file_map = file_map self._data_cache = None + self._fdata_cache = None @property def affine(self): From 5c9ebb28505c8bf31dc9706da65204dd92dc93ed Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 28 Jul 2019 21:16:26 -0400 Subject: [PATCH 156/689] DOC: Update changelog for upcoming 2.5.0 release --- Changelog | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/Changelog b/Changelog index e2f621e4cb..a7a4c6573c 100644 --- a/Changelog +++ b/Changelog @@ -25,8 +25,36 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. 
 References like "pr/298" refer to github pull request numbers.
 
+2.5.0 (To be decided)
+=====================
+
+Enhancements
+------------
+* Add SerializableImage class with to/from_bytes methods (pr/644) (CM,
+  reviewed by MB)
+* Check CIFTI-2 data shape matches shape described by header (pr/774)
+  (Michiel Cottaar, reviewed by CM)
+
+Bug fixes
+---------
+* Handle stricter numpy casting rules in tests (pr/768) (CM,
+  reviewed by PM)
+
+Maintenance
+-----------
+* Fix CodeCov paths on Appveyor for more accurate coverage (pr/769) (CM)
+* Move to setuptools and reduce use of ``nisext`` functions (pr/764) (CM,
+  reviewed by YOH)
+
+API changes and deprecations
+----------------------------
+* Effect threatened warnings and set some deprecation timelines (pr/755) (CM)
+  * Trackvis methods now default to v2 formats
+  * ``nibabel.trackvis`` scheduled for removal in nibabel 4.0
+  * ``nibabel.minc`` and ``nibabel.MincImage`` will be removed in nibabel 3.0
+
 2.4.1 (Monday 27 May 2019)
-============================
+==========================
 
 Contributions from Egor Pafilov, Jath Palasubramaniam, Richard Nemec, and
 Dave Allured.

From ced392b7ed3fc903cc35722f6b9ae4001b65fb49 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz" 
Date: Sun, 28 Jul 2019 21:28:06 -0400
Subject: [PATCH 157/689] MAINT: Version 2.5.0

---
 nibabel/info.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/info.py b/nibabel/info.py
index 4fa1b44b1c..fcb912734c 100644
--- a/nibabel/info.py
+++ b/nibabel/info.py
@@ -19,8 +19,8 @@
 _version_major = 2
 _version_minor = 5
 _version_micro = 0
-_version_extra = 'dev'
-# _version_extra = ''
+# _version_extra = 'dev'
+_version_extra = ''
 
 # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
 __version__ = "%s.%s.%s%s" % (_version_major,

From 71f29d304ad112e8d17bf423294dce58b627f6d5 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz" 
Date: Mon, 29 Jul 2019 22:58:20 -0400
Subject: [PATCH 158/689] FIX: Set TRK byte order to < before filling values

---
 nibabel/streamlines/trk.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py
index 7ff80dc59f..3100dfe5ec 100644
--- a/nibabel/streamlines/trk.py
+++ b/nibabel/streamlines/trk.py
@@ -269,7 +269,8 @@ def is_correct_format(cls, fileobj):
         return magic_number == cls.MAGIC_NUMBER
 
     @classmethod
     def _default_structarr(cls):
         """ Return an empty compliant TRK header as numpy structured array """
-        st_arr = np.zeros((), dtype=header_2_dtype)
+        # Enforce little-endian byte order for header
+        st_arr = np.zeros((), dtype=header_2_dtype).newbyteorder('<')
 
         # Default values
         st_arr[Field.MAGIC_NUMBER] = cls.MAGIC_NUMBER
@@ -395,8 +396,7 @@ def save(self, fileobj):
             pointing to TRK file (and ready to write from the beginning
             of the TRK header data).
         """
-        # Enforce little-endian byte order for header
-        header = self._default_structarr().newbyteorder('<')
+        header = self._default_structarr()
 
         # Override hdr's fields by those contained in `header`.
         for k, v in self.header.items():

From 8b00fd07485c24babd6d2454e0d96db25c085b59 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz" 
Markiewicz" Date: Mon, 29 Jul 2019 23:19:52 -0400 Subject: [PATCH 159/689] PY3: Use next function on iterator while checking ECAT affines Fix suggested by Andrew Crabb (@idoimaging) in https://github.com/nipy/nibabel/issues/776#issue-473462718 --- nibabel/ecat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index ca9c20328a..fef2741ef8 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -559,7 +559,7 @@ def _check_affines(self): affs = [self.get_frame_affine(i) for i in range(nframes)] if affs: i = iter(affs) - first = i.next() + first = next(i) for item in i: if not np.allclose(first, item): return False From 2434adb2092b20fbe4a74e03e08c27f5bba42ff3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 29 Jul 2019 23:08:27 -0400 Subject: [PATCH 160/689] DOCTEST: Display system endianness-neutral representation of >> fname = os.path.join(datadir, 'example4d+orig.HEAD') >>> header = AFNIHeader(parse_AFNI_header(fname)) - >>> header.get_data_dtype() - dtype('int16') + >>> header.get_data_dtype().str + '>> header.get_zooms() (3.0, 3.0, 3.0, 3.0) >>> header.get_data_shape() From d5e90dce0792c95af234da8ebbcf91f27ac0640f Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 30 Jul 2019 15:46:54 -0400 Subject: [PATCH 161/689] TEST: Use package-wide setup and teardown to adjust numpy print options --- nibabel/__init__.py | 30 +++++++++++++++++++++--------- nibabel/affines.py | 1 - nibabel/casting.py | 1 - nibabel/nicom/dwiparams.py | 1 - nibabel/nifti1.py | 1 - nibabel/quaternions.py | 1 - 6 files changed, 21 insertions(+), 14 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 20f1aafefc..5f571ad12a 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -35,16 +35,28 @@ For more detailed information see the :ref:`manual`. """ - -def setup_test(): - """ Set numpy print options to "legacy" for new versions of numpy - - If imported into a file, nosetest will run this before any doctests. - """ - import numpy +# Package-wide test setup and teardown +# Numpy changed print options in 1.14; we can update docstrings and remove +# these when our minimum for building docs exceeds that +_save_printopts = None + +def setup_package(): + """ Set numpy print style to legacy="1.13" for newer versions of numpy """ + import nibabel as nb + import numpy as np from distutils.version import LooseVersion - if LooseVersion(numpy.__version__) >= LooseVersion('1.14'): - numpy.set_printoptions(legacy="1.13") + if nb._save_printopts is None: + nb._save_printopts = np.get_printoptions().get('legacy') + if LooseVersion(np.__version__) >= LooseVersion('1.14'): + np.set_printoptions(legacy="1.13") + +def teardown_package(): + """ Reset print options when tests finish """ + import nibabel as nb + import numpy as np + if nb._save_printopts is not None: + np.set_printoptions(legacy=nb._save_printopts) + nb._save_printopts = None # module imports diff --git a/nibabel/affines.py b/nibabel/affines.py index 057233e454..07154089a1 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -6,7 +6,6 @@ import numpy as np from six.moves import reduce -from . import setup_test # noqa class AffineError(ValueError): diff --git a/nibabel/casting.py b/nibabel/casting.py index 3709ee1dea..89be788da5 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -8,7 +8,6 @@ from platform import processor, machine import numpy as np -from . 
import setup_test # noqa class CastingError(Exception): diff --git a/nibabel/nicom/dwiparams.py b/nibabel/nicom/dwiparams.py index 1fda89b0da..e9d05c0d57 100644 --- a/nibabel/nicom/dwiparams.py +++ b/nibabel/nicom/dwiparams.py @@ -21,7 +21,6 @@ ''' import numpy as np import numpy.linalg as npl -from .. import setup_test as setup_module # noqa def B2q(B, tol=None): diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index a050195234..c2d409e81a 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -28,7 +28,6 @@ from .spm99analyze import SpmAnalyzeHeader from .casting import have_binary128 from .pydicom_compat import have_dicom, pydicom as pdcm -from . import setup_test # noqa # nifti1 flat header definition for Analyze-like first 348 bytes # first number in comments indicates offset in file header in bytes diff --git a/nibabel/quaternions.py b/nibabel/quaternions.py index f9318a93f2..adc2367238 100644 --- a/nibabel/quaternions.py +++ b/nibabel/quaternions.py @@ -27,7 +27,6 @@ import math import numpy as np -from . import setup_test # noqa MAX_FLOAT = np.maximum_sctype(np.float) FLOAT_EPS = np.finfo(np.float).eps From 4d7dd36854b6b519fd28e363d704ad5a1acb17d7 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 30 Jul 2019 19:42:13 -0400 Subject: [PATCH 162/689] MAINT: Use a more general _test_state structure for setup/teardown --- nibabel/__init__.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 5f571ad12a..f407b9ed39 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -36,27 +36,27 @@ """ # Package-wide test setup and teardown -# Numpy changed print options in 1.14; we can update docstrings and remove -# these when our minimum for building docs exceeds that -_save_printopts = None +_test_states = { + # Numpy changed print options in 1.14; we can update docstrings and remove + # these when our minimum for building docs exceeds that + 'legacy_printopt': None, + } def setup_package(): """ Set numpy print style to legacy="1.13" for newer versions of numpy """ - import nibabel as nb import numpy as np from distutils.version import LooseVersion - if nb._save_printopts is None: - nb._save_printopts = np.get_printoptions().get('legacy') if LooseVersion(np.__version__) >= LooseVersion('1.14'): + if _test_states.get('legacy_printopt') is None: + _test_states['legacy_printopt'] = np.get_printoptions().get('legacy') np.set_printoptions(legacy="1.13") def teardown_package(): """ Reset print options when tests finish """ - import nibabel as nb import numpy as np - if nb._save_printopts is not None: - np.set_printoptions(legacy=nb._save_printopts) - nb._save_printopts = None + if _test_states.get('legacy_printopt') is not None: + np.set_printoptions(legacy=_test_states['legacy_printopt']) + _test_states['legacy_printopt'] = None # module imports From 4cccd86cd936436110c539a0946513ce6fa33fa0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 31 Jul 2019 10:42:45 -0400 Subject: [PATCH 163/689] Apply suggestions from code review Co-Authored-By: Yaroslav Halchenko --- nibabel/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index f407b9ed39..20fdad3469 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -55,8 +55,7 @@ def teardown_package(): """ Reset print options when tests finish """ import numpy as np if _test_states.get('legacy_printopt') is not None: - 
np.set_printoptions(legacy=_test_states['legacy_printopt']) - _test_states['legacy_printopt'] = None + np.set_printoptions(legacy=_test_states.pop('legacy_printopt')) # module imports From 927b6ce8cb459e1dc1173a4da9722e2904c1b5d5 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 31 Jul 2019 07:18:24 -0400 Subject: [PATCH 164/689] ENH: Pass endianness as parameter to TrkFile._default_structarr --- nibabel/streamlines/trk.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py index 3100dfe5ec..805b44edcf 100644 --- a/nibabel/streamlines/trk.py +++ b/nibabel/streamlines/trk.py @@ -13,7 +13,7 @@ from nibabel.openers import Opener from nibabel.py3k import asstr -from nibabel.volumeutils import (native_code, swapped_code) +from nibabel.volumeutils import (native_code, swapped_code, endian_codes) from nibabel.orientations import (aff2axcodes, axcodes2ornt) from .array_sequence import create_arraysequences_from_generator @@ -266,11 +266,14 @@ def is_correct_format(cls, fileobj): return magic_number == cls.MAGIC_NUMBER @classmethod - def _default_structarr(cls): + def _default_structarr(cls, endianness=None): """ Return an empty compliant TRK header as numpy structured array """ - # Enforce little-endian byte order for header - st_arr = np.zeros((), dtype=header_2_dtype).newbyteorder('<') + dt = header_2_dtype + if endianness is not None: + endianness = endian_codes[endianness] + dt = dt.newbyteorder(endianness) + st_arr = np.zeros((), dtype=dt) # Default values st_arr[Field.MAGIC_NUMBER] = cls.MAGIC_NUMBER @@ -284,10 +287,10 @@ def _default_structarr(cls): return st_arr @classmethod - def create_empty_header(cls): + def create_empty_header(cls, endianness=None): """ Return an empty compliant TRK header as dict """ - st_arr = cls._default_structarr() + st_arr = cls._default_structarr(endianness) return dict(zip(st_arr.dtype.names, st_arr.tolist())) @classmethod @@ -396,7 +399,8 @@ def save(self, fileobj): pointing to TRK file (and ready to write from the beginning of the TRK header data). """ - header = self._default_structarr() + # Enforce little-endian byte order for header + header = self._default_structarr(endianness='little') # Override hdr's fields by those contained in `header`. for k, v in self.header.items(): From 580f75c46881ae1dfa22cf0d06fbdaa75434e494 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 1 Aug 2019 20:33:39 -0400 Subject: [PATCH 165/689] DOC: Update changelog --- Changelog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Changelog b/Changelog index a7a4c6573c..3ca4062a69 100644 --- a/Changelog +++ b/Changelog @@ -28,6 +28,8 @@ References like "pr/298" refer to github pull request numbers. 2.5.0 (To be decided) ===================== +Thanks for the test ECAT file and fix provided by Andrew Crabb. 
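(An aside on the ``_default_structarr`` change above: the endianness handling
reduces to ``numpy.dtype.newbyteorder``. A minimal sketch of the behaviour,
with a hypothetical one-field dtype standing in for the real
``header_2_dtype``:

    import numpy as np

    dt = np.dtype([('count', 'i4')])      # native byte order
    little = dt.newbyteorder('<')         # what TRK requires on disk

    arr = np.zeros((), dtype=little)
    arr['count'] = 5
    print(arr.tobytes())                  # b'\x05\x00\x00\x00' on any platform

Forcing the byte order before filling values is what keeps headers written on
big-endian systems readable everywhere.)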
+ Enhancements ------------ * Add SerializableImage class with to/from_bytes methods (pr/644) (CM, @@ -39,12 +41,16 @@ Bug fixes --------- * Handle stricter numpy casting rules in tests (pr/768) (CM) reviewed by PM) +* TRK header fields flipped in files written on big-endian systems + (pr/782) (CM, reviewed by YOH, MB) +* Load multiframe ECAT images with Python 3 (CM and Andrew Crabb) Maintenance ----------- * Fix CodeCov paths on Appveyor for more accurate coverage (pr/769) (CM) * Move to setuptools and reduce use ``nisext`` functions (pr/764) (CM, reviewed by YOH) +* Better handle test setup/teardown (pr/785) (CM, reviewed by YOH) API changes and deprecations ---------------------------- From 3994fa5d3b14d507746ce625ac4a13ce03bac0c9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 2 Aug 2019 06:23:35 -0400 Subject: [PATCH 166/689] MAINT: Update mailmap, Zenodo --- .mailmap | 1 + .zenodo.json | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/.mailmap b/.mailmap index 54f6514527..ea15651170 100644 --- a/.mailmap +++ b/.mailmap @@ -45,6 +45,7 @@ Mathias Goncalves mathiasg Matthew Cieslak Matt Cieslak Michael Hanke Michael Hanke +Michiel Cottaar Michiel Cottaar Ly Nguyen lxn2 Oliver P. Hinds ohinds Paul McCarthy Paul McCarthy diff --git a/.zenodo.json b/.zenodo.json index 562d42e00c..16555380a4 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -197,6 +197,9 @@ { "name": "Fauber, Bennet" }, + { + "name": "Panfilov, Egor" + }, { "affiliation": "McGill University", "name": "Poline, Jean-Baptiste", @@ -249,6 +252,9 @@ "name": "Gonzalez, Ivan", "orcid": "0000-0002-6451-6909" }, + { + "name": "Palasubramaniam, Jath" + }, { "name": "Lecher, Justin" }, From 09af5484b584a1d7e5eaa2922b65132917f454a8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 4 Aug 2019 09:10:42 -0400 Subject: [PATCH 167/689] REL: 2.5.0 --- Changelog | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Changelog b/Changelog index 3ca4062a69..832e7d0843 100644 --- a/Changelog +++ b/Changelog @@ -25,8 +25,11 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. -2.5.0 (To be decided) -===================== +2.5.0 (Sunday 4 August 2019) +============================ + +The 2.5.x series is the last with support for either Python 2 or Python 3.4. +Extended support for this series 2.5 will last through December 2020. Thanks for the test ECAT file and fix provided by Andrew Crabb. From 6d4660cf33c063282ad461b76a00de3a93b47276 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 5 Aug 2019 09:50:23 -0400 Subject: [PATCH 168/689] MAINT: 2.5.1-dev --- nibabel/info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index fcb912734c..12d0dc9d7e 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -18,9 +18,9 @@ # (pre-release) version. _version_major = 2 _version_minor = 5 -_version_micro = 0 -# _version_extra = 'dev' -_version_extra = '' +_version_micro = 1 +_version_extra = 'dev' +# _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From cee6bf3e9a028012728b9a292a9a3a5d8f52f87d Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 5 Aug 2019 17:11:35 -0700 Subject: [PATCH 169/689] MAINT: 3.0.0-dev --- nibabel/info.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index fcb912734c..39b77421db 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -16,11 +16,11 @@ # # We usually use `dev` as `_version_extra` to label this as a development # (pre-release) version. -_version_major = 2 -_version_minor = 5 +_version_major = 3 +_version_minor = 0 _version_micro = 0 -# _version_extra = 'dev' -_version_extra = '' +_version_extra = 'dev' +# _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From 2806a57afa1005f8d9b8a87eabc9f7c2b9d390d1 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 17 Jul 2019 22:01:27 -0400 Subject: [PATCH 170/689] MAINT: Add community guidelines --- .github/CODE_OF_CONDUCT.md | 106 +++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 .github/CODE_OF_CONDUCT.md diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..b793f4611b --- /dev/null +++ b/.github/CODE_OF_CONDUCT.md @@ -0,0 +1,106 @@ +# Community Guidelines + +Nibabel is a [NIPY](https://nipy.org) project, and we strive to adhere to the +[NIPY Community Code](https://nipy.org/conduct.html), reproduced below. + +The NIPY community is a community of practice devoted to the use of the Python programming language +in the analysis of neuroimaging data. The following code of conduct is a guideline for our behavior +as we participate in this community. + +It is based on, and heavily inspired by a reading of the Python community code of conduct, the +Apache foundation code of conduct, the Debian code of conduct, and the Ten Principles of Burning +Man. + +## The code of conduct for the NIPY community + +The Neuroimaging in Python (NIPY) community is made up of members with a diverse set of skills, +personalities, background, and experiences. We welcome these differences because they are the +source of diverse ideas, solutions and decisions about our work. Decisions we make affect users, +colleagues, and through scientific results, the general public. We take these consequences +seriously when making decisions. When you are working with members of the community, we encourage +you to follow these guidelines, which help steer our interactions and help keep NIPY a positive, +successful, and growing community. + +### A member of the NIPY community is: + +#### Open + +Members of the community are open to collaboration. Be it on the reuse of data, on the +implementation of methods, on finding technical solutions, establishing best practices, and +otherwise. We are accepting of all who wish to take part in our activities, fostering an +environment where anyone can participate and everyone can make a difference. + +#### Be collaborative! + +Our work will be used by other people, and in turn we will depend on the work of others. When we +make something for the benefit of others, we are willing to explain to others how it works, so that +they can build on the work to make it even better. We are willing to provide constructive criticism +on the work of others and accept criticism of our own work, as the experiences and skill sets of +other members contribute to the whole of our efforts. + +#### Be inquisitive! + +Nobody knows everything! 
Asking questions early avoids many problems later, so questions are +encouraged, though they may be directed to the appropriate forum. Those who are asked should be +responsive and helpful, within the context of our shared goal of improving neuroimaging practice. + +#### Considerate + +Members of the community are considerate of their peers. We are thoughtful when addressing the +efforts of others, keeping in mind that often-times the labor was completed simply for the good of +the community. We are attentive in our communications, whether in person or online, and we are +tactful when approaching differing views. + +#### Be careful in the words that you choose: + +We value courtesy, kindness and inclusiveness in all our interactions. Therefore, we take +responsibility for our own speech. In particular, we avoid: + + * Personal insults. + * Violent threats or language directed against another person. + * Sexist, racist, or otherwise discriminatory jokes and language. + * Any form of sexual or violent material. + * Sharing private content, such as emails sent privately or non-publicly, or unlogged forums such + as IRC channel history. + * Excessive or unnecessary profanity. + * Repeated harassment of others. In general, if someone asks you to stop, then stop. + * Advocating for, or encouraging, any of the above behaviour. + +#### Try to be concise in communication + +Keep in mind that what you write once will be read by many others. Writing a short email means +people can understand the conversation as efficiently as possible. Even short emails should always +strive to be empathetic, welcoming, friendly and patient. When a long explanation is necessary, +consider adding a summary. + +Try to bring new ideas to a conversation, so that each message adds something unique to the +conversation. Keep in mind that, when using email, the rest of the thread still contains the other +messages with arguments that have already been made. + +Try to stay on topic, especially in discussions that are already fairly long and complex. + +#### Respectful + +Members of the community are respectful. We are respectful of others, their positions, their +skills, their commitments, and their efforts. We are respectful of the volunteer and professional +efforts that permeate the NIPY community. We are respectful of the processes set forth in the +community, and we work within them. When we disagree, we are courteous and kind in raising our +issues. + +## Incident Reporting + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting +project maintainer Chris Markiewicz . All complaints will be reviewed and +investigated and will result in a response that is deemed necessary and appropriate to the +circumstances. The project team is obligated to maintain confidentiality with regard to the +reporter of an incident. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face +temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +The vast majority of the above was taken from the NIPY Code of Conduct. +Incident reporting guidelines were adapted from the [Contributor +Covenant](https://www.contributor-covenant.org/), version 1.4, available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct. From b1c97ecdc1e04372e7d5c74ec767839c29d1caea Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 26 Jul 2019 14:29:43 -0400 Subject: [PATCH 171/689] DOC: Update docstring standard --- doc/source/devel/devguide.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/devel/devguide.rst b/doc/source/devel/devguide.rst index 370f4c8d2a..5e155b75ec 100644 --- a/doc/source/devel/devguide.rst +++ b/doc/source/devel/devguide.rst @@ -28,7 +28,7 @@ Code Documentation All documentation should be written using Numpy documentation conventions: - https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt#docstring-standard + https://numpydoc.readthedocs.io/en/latest/format.html#docstring-standard Git Repository From c94bc8e7203b6e6d94beec27c0c03b2d9e834867 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 28 Jul 2019 20:43:38 -0400 Subject: [PATCH 172/689] DOC: Contributing guide, mostly linking to docs --- .github/CONTRIBUTING.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/CONTRIBUTING.md diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..81687ac149 --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,16 @@ +# Contributing to NiBabel + +Welcome to the NiBabel repository! +We're excited you're here and want to contribute. + +Please see the [NiBabel Developer Guidelines][link_devguide] on our +on our [documentation website][link_docs]. + +These guidelines are designed to make it as easy as possible to get involved. +If you have any questions that aren't discussed in our documentation, or it's +difficult to find what you're looking for, please let us know by opening an +[issue][link_issues]! + +[link_docs]: https://nipy.org/nibabel +[link_devguide]: https://nipy.org/nibabel/devel/devguide.html +[link_issues]: https://github.com/poldracklab/fmriprep/issues From e2853aa9227ea34a727457201942a7391de56f93 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 29 Jul 2019 09:22:09 -0400 Subject: [PATCH 173/689] DOC: Drop covenant boilerplate --- .github/CODE_OF_CONDUCT.md | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index b793f4611b..52b16c8f09 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -89,18 +89,17 @@ issues. ## Incident Reporting -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting -project maintainer Chris Markiewicz . All complaints will be reviewed and -investigated and will result in a response that is deemed necessary and appropriate to the -circumstances. The project team is obligated to maintain confidentiality with regard to the -reporter of an incident. +We put great value on respectful, friendly and helpful communication. -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face -temporary or permanent repercussions as determined by other members of the project's leadership. +If you feel that any of our Nibabel communications lack respect, or are unfriendly or unhelpful, +please try the following steps: + +* If you feel able, please let the person who has sent the email or comment that you found it + disrespectful / unhelpful / unfriendly, and why; + +* If you don't feel able to do that, or that didn't work, please contact Chris Markiewicz directly + by email (), and he will do his best to resolve it. ## Attribution The vast majority of the above was taken from the NIPY Code of Conduct. 
-Incident reporting guidelines were adapted from the [Contributor -Covenant](https://www.contributor-covenant.org/), version 1.4, available at -https://www.contributor-covenant.org/version/1/4/code-of-conduct. From 028e987e21d295e5c7ffc3291e5ea8198ce4f1f2 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 14:56:10 -0400 Subject: [PATCH 174/689] DOC: Add Matthew Brett as point of contact --- .github/CODE_OF_CONDUCT.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index 52b16c8f09..b3e73ac8c8 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -99,6 +99,8 @@ please try the following steps: * If you don't feel able to do that, or that didn't work, please contact Chris Markiewicz directly by email (), and he will do his best to resolve it. + If you don't feel comfortable contacting Chris, please email Matthew Brett + () instead. ## Attribution From fb54f05d07d92ca514d3752f395416a69d60c03f Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 30 Jul 2019 09:25:29 -0400 Subject: [PATCH 175/689] CI: Add Azure Pipeline for Python 3.5 --- .azure-pipelines/windows.yml | 51 ++++++++++++++++++++++++++++++++++++ azure-pipelines.yml | 10 +++++++ 2 files changed, 61 insertions(+) create mode 100644 .azure-pipelines/windows.yml create mode 100644 azure-pipelines.yml diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml new file mode 100644 index 0000000000..af77e7717c --- /dev/null +++ b/.azure-pipelines/windows.yml @@ -0,0 +1,51 @@ + +parameters: + name: '' + vmImage: '' + matrix: [] + +jobs: +- job: ${{ parameters.name }} + pool: + vmImage: ${{ parameters.vmImage }} + variables: + VIRTUALENV: 'testvenv' + strategy: + matrix: + ${{ insert }}: ${{ parameters.matrix }} + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '$(PYTHON_VERSION)' + addToPath: true + architecture: '$(PYTHON_ARCH)' + - script: | + python -m pip install --upgrade pip virtualenv + virtualenv --python=python venv + venv\\scripts\\activate + python --version + python -m pip install --upgrade pip setuptools>=30.3.0 wheel + displayName: 'Install virtualenv' + - script: | + # Optional dependencies + python -m pip install numpy scipy matplotlib h5py pydicom + # Test dependencies + python -m pip install nose mock coverage codecov + displayName: 'Install dependencies' + - script: | + python -m pip install . + SET NIBABEL_DATA_DIR=%CD%\\nibabel-data + displayName: 'Install nibabel' + - script: | + # Change into an innocuous directory and find tests from installation + mkdir for_testing + cd for_testing + cp ../.coveragerc . + nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + displayName: 'Install nibabel' + - script: | + codecov + displayName: 'Upload To Codecov' + env: + CODECOV_TOKEN: $(CODECOV_TOKEN) diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000000..9b11ad1deb --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,10 @@ +# Adapted from https://github.com/pandas-dev/pandas/blob/master/azure-pipelines.yml +jobs: +- template: .azure-pipelines/windows.yml + parameters: + name: Windows + vmImage: vs2017-win2016 + matrix: + py35: + PYTHON_VERSION: '3.5' + PYTHON_ARCH: 'x64' From c436091db15d95cc755dfe1df3cbfc4dc6e65e6a Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 30 Jul 2019 09:42:37 -0400 Subject: [PATCH 176/689] CI: Remove AppVeyor --- appveyor.yml | 46 ---------------------------------------------- 1 file changed, 46 deletions(-) delete mode 100644 appveyor.yml diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 796c74a7da..0000000000 --- a/appveyor.yml +++ /dev/null @@ -1,46 +0,0 @@ -# vim ft=yaml -# CI on Windows via appveyor - -environment: - - matrix: - - PYTHON: C:\Python27 - - PYTHON: C:\Python27-x64 - - PYTHON: C:\Python34 - - PYTHON: C:\Python34-x64 - - PYTHON: C:\Python35 - - PYTHON: C:\Python35-x64 - - PYTHON: C:\Python36 - - PYTHON: C:\Python36-x64 - - PYTHON: C:\Python37 - - PYTHON: C:\Python37-x64 - -install: - # Prepend newly installed Python to the PATH of this build (this cannot be - # done from inside the powershell script as it would require to restart - # the parent CMD process). - - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% - - - python -m pip install --upgrade pip virtualenv - - virtualenv --python=python venv - - venv\Scripts\activate - - python --version - - # Install the dependencies of the project. - - pip install --upgrade pip setuptools>=27.0 wheel - - pip install numpy scipy matplotlib h5py pydicom - - pip install nose mock coverage codecov - - pip install . - - SET NIBABEL_DATA_DIR=%CD%\nibabel-data - -build: false # Not a C# project, build stuff at the test step instead. - -test_script: - # Change into an innocuous directory and find tests from installation - - mkdir for_testing - - cd for_testing - - cp ../.coveragerc . - - nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - -after_test: - - codecov From c33e5592899626ae59b15b4f91a006b61c963e69 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 30 Jul 2019 09:58:24 -0400 Subject: [PATCH 177/689] AZURE: Environment reset at each step --- .azure-pipelines/windows.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index af77e7717c..4251a4e558 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -28,23 +28,25 @@ jobs: python -m pip install --upgrade pip setuptools>=30.3.0 wheel displayName: 'Install virtualenv' - script: | - # Optional dependencies + venv\\scripts\\activate python -m pip install numpy scipy matplotlib h5py pydicom - # Test dependencies python -m pip install nose mock coverage codecov displayName: 'Install dependencies' - script: | + venv\\scripts\\activate python -m pip install . SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | - # Change into an innocuous directory and find tests from installation + venv\\scripts\\activate mkdir for_testing cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel displayName: 'Install nibabel' - script: | + venv\\scripts\\activate + cd for_testing codecov displayName: 'Upload To Codecov' env: From aabd9fbf54b4e56ec51ee0a9782a9cc86fd6a870 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 30 Jul 2019 10:03:53 -0400 Subject: [PATCH 178/689] AZURE: Virtualenvs do not seem to work as expected --- .azure-pipelines/windows.yml | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 4251a4e558..3bf81d7ef3 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -8,8 +8,6 @@ jobs: - job: ${{ parameters.name }} pool: vmImage: ${{ parameters.vmImage }} - variables: - VIRTUALENV: 'testvenv' strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} @@ -21,31 +19,23 @@ jobs: addToPath: true architecture: '$(PYTHON_ARCH)' - script: | - python -m pip install --upgrade pip virtualenv - virtualenv --python=python venv - venv\\scripts\\activate - python --version python -m pip install --upgrade pip setuptools>=30.3.0 wheel - displayName: 'Install virtualenv' + displayName: 'Update build tools' - script: | - venv\\scripts\\activate python -m pip install numpy scipy matplotlib h5py pydicom python -m pip install nose mock coverage codecov displayName: 'Install dependencies' - script: | - venv\\scripts\\activate python -m pip install . SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | - venv\\scripts\\activate mkdir for_testing cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel displayName: 'Install nibabel' - script: | - venv\\scripts\\activate cd for_testing codecov displayName: 'Upload To Codecov' From 147f954d3952c28d862999b454be8b6f2d39fedd Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 30 Jul 2019 10:06:47 -0400 Subject: [PATCH 179/689] AZURE: Fix step name --- .azure-pipelines/windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 3bf81d7ef3..831bd614df 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -34,7 +34,7 @@ jobs: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - displayName: 'Install nibabel' + displayName: 'Nose tests' - script: | cd for_testing codecov From 9b0e4c918a7571a4cceb122fce1cc0c4dedc9a37 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 30 Jul 2019 10:17:40 -0400 Subject: [PATCH 180/689] AZURE: Expand matrix to Python 3.4-3.7, x86 and x64 --- .azure-pipelines/windows.yml | 4 +++- azure-pipelines.yml | 17 ++++++++++++++++- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 831bd614df..ad1b72eb77 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -8,6 +8,8 @@ jobs: - job: ${{ parameters.name }} pool: vmImage: ${{ parameters.vmImage }} + variables: + EXTRA_WHEELS: "https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} @@ -22,7 +24,7 @@ jobs: python -m pip install --upgrade pip setuptools>=30.3.0 wheel displayName: 'Update build tools' - script: | - python -m pip install numpy scipy matplotlib h5py pydicom + python -m pip install --find-links %EXTRA_WHEELS% numpy scipy matplotlib h5py pydicom python -m pip install nose mock coverage codecov displayName: 'Install dependencies' - script: | diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 9b11ad1deb..cb2612d5c3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -5,6 +5,21 @@ jobs: name: Windows vmImage: vs2017-win2016 matrix: - py35: + py35-x86: PYTHON_VERSION: '3.5' + PYTHON_ARCH: 'x86' + py35-x64: + PYTHON_VERSION: '3.5' + PYTHON_ARCH: 'x64' + py36-x86: + PYTHON_VERSION: '3.6' + PYTHON_ARCH: 'x86' + py36-x64: + PYTHON_VERSION: '3.6' + PYTHON_ARCH: 'x64' + py37-x86: + PYTHON_VERSION: '3.7' + PYTHON_ARCH: 'x86' + py37-x64: + PYTHON_VERSION: '3.7' PYTHON_ARCH: 'x64' From bec7b15487934767671a83702922febb22a8d7d9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 14 Aug 2019 11:19:46 -0400 Subject: [PATCH 181/689] CI: Test installing from git archive --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index e6111e8790..6f08335eb5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -72,6 +72,9 @@ matrix: - python: 2.7 env: - INSTALL_TYPE=requirements + - python: 2.7 + env: + - INSTALL_TYPE=archive - python: 2.7 env: - CHECK_TYPE="style" @@ -122,6 +125,9 @@ install: elif [ "$INSTALL_TYPE" == "requirements" ]; then pip install $EXTRA_PIP_FLAGS -r requirements.txt python setup.py install + elif [ "$INSTALL_TYPE" == "archive" ]; then + git archive -o package.tar.gz HEAD + pip install $EXTRA_PIP_FLAGS package.tar.gz fi # Point to nibabel data directory - export NIBABEL_DATA_DIR="$PWD/nibabel-data" From f7e66e712699f86d0dcc587ac1a5a9cde7eac48f Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 5 Aug 2019 17:30:11 -0700 Subject: [PATCH 182/689] CI: Drop 2.7, 3.4 tests from Travis --- .travis.yml | 33 ++++++++------------------------- 1 file changed, 8 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6f08335eb5..f8ca58a75f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,20 +22,16 @@ env: - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" python: - - 3.5 - 3.6 - 3.7 matrix: include: - - python: 3.4 - dist: trusty - sudo: false # Absolute minimum dependencies - - python: 2.7 + - python: 3.5 env: - DEPENDS="numpy==1.8 setuptools==30.3.0" # Absolute minimum dependencies - - python: 2.7 + - python: 3.5 env: - DEPENDS="numpy==1.8" - CHECK_TYPE="import" @@ -44,54 +40,41 @@ matrix: # nibabel/info.py # doc/source/installation.rst # requirements.txt - - python: 2.7 + - python: 3.5 env: - DEPENDS="numpy==1.8 matplotlib==1.3.1" # Minimum pydicom dependency - - python: 2.7 + - python: 3.5 env: - DEPENDS="numpy==1.8 pydicom==0.9.9 pillow==2.6" # pydicom master branch - python: 3.5 env: - DEPENDS="numpy git+https://github.com/pydicom/pydicom.git@master" - # test 2.7 against pre-release builds of everything - - python: 2.7 - env: - - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" # test 3.5 against pre-release builds of everything - python: 3.5 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" - - python: 2.7 + - python: 3.5 env: - INSTALL_TYPE=sdist - - python: 2.7 + - python: 3.5 env: - INSTALL_TYPE=wheel - - python: 2.7 + - python: 3.5 env: - INSTALL_TYPE=requirements - - python: 2.7 + - python: 3.5 env: - INSTALL_TYPE=archive - - python: 2.7 - env: - - CHECK_TYPE="style" - python: 3.5 env: - CHECK_TYPE="style" # Documentation doctests - - python: 2.7 - env: - - CHECK_TYPE="doc_doctests" - python: 3.5 env: - CHECK_TYPE="doc_doctests" # Run tests with indexed_gzip present - - python: 2.7 - env: - - OPTIONAL_DEPENDS="indexed_gzip" - python: 3.5 env: - OPTIONAL_DEPENDS="indexed_gzip" From e2639c4b89c90716fe6f8841fc268651a3493b16 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 12 Aug 2019 09:49:08 -0400 Subject: [PATCH 183/689] MAINT: Update dependencies for Python >= 3.5 --- .travis.yml | 8 ++++---- requirements.txt | 2 +- setup.cfg | 10 ++++++---- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index f8ca58a75f..f41c3efb5c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,11 +29,11 @@ matrix: # Absolute minimum dependencies - python: 3.5 env: - - DEPENDS="numpy==1.8 setuptools==30.3.0" + - DEPENDS="numpy==1.10.1 setuptools==30.3.0" # Absolute minimum dependencies - python: 3.5 env: - - DEPENDS="numpy==1.8" + - DEPENDS="numpy==1.10.1" - CHECK_TYPE="import" # Absolute minimum dependencies plus oldest MPL # Check these against: @@ -42,11 +42,11 @@ matrix: # requirements.txt - python: 3.5 env: - - DEPENDS="numpy==1.8 matplotlib==1.3.1" + - DEPENDS="numpy==1.10.1 matplotlib==1.3.1" # Minimum pydicom dependency - python: 3.5 env: - - DEPENDS="numpy==1.8 pydicom==0.9.9 pillow==2.6" + - DEPENDS="numpy==1.10.1 pydicom==0.9.9 pillow==2.6" # pydicom master branch - python: 3.5 env: diff --git a/requirements.txt b/requirements.txt index 6299333665..fecac9af58 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,4 @@ # doc/source/installation.rst six>=1.3 -numpy>=1.8 +numpy>=1.10.1 diff --git a/setup.cfg b/setup.cfg index 69bd84afe7..e1192b6a2d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -29,12 +29,13 @@ provides = nisext [options] -python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +python_requires = >=3.5 install_requires = - numpy >=1.8 + numpy >=1.10.1 six >=1.3 - bz2file ; python_version < "3.0" -tests_require = nose +tests_require = + nose >=0.10.1 + mock test_suite = nose.collector zip_safe = False packages = find: @@ -47,6 +48,7 @@ doc = sphinx >=0.3 test = nose >=0.10.1 + mock all = %(dicom)s %(doc)s From 83d934e218e595d8ea198bd5bf29a6e199536b37 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 10:08:06 -0400 Subject: [PATCH 184/689] MAINT: Add update_requirements.py; update requirements --- .travis.yml | 8 ++++---- doc/source/installation.rst | 9 +++------ min-requirements.txt | 3 +++ requirements.txt | 12 +++--------- tools/update_requirements.py | 29 +++++++++++++++++++++++++++++ 5 files changed, 42 insertions(+), 19 deletions(-) create mode 100644 min-requirements.txt create mode 100755 tools/update_requirements.py diff --git a/.travis.yml b/.travis.yml index f41c3efb5c..1ba3fd50a7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,11 +29,11 @@ matrix: # Absolute minimum dependencies - python: 3.5 env: - - DEPENDS="numpy==1.10.1 setuptools==30.3.0" + - DEPENDS="-r min-requirements.txt setuptools==30.3.0" # Absolute minimum dependencies - python: 3.5 env: - - DEPENDS="numpy==1.10.1" + - DEPENDS="-r min-requirements.txt" - CHECK_TYPE="import" # Absolute minimum dependencies plus oldest MPL # Check these against: @@ -42,11 +42,11 @@ matrix: # requirements.txt - python: 3.5 env: - - DEPENDS="numpy==1.10.1 matplotlib==1.3.1" + - DEPENDS="-r min-requirements.txt matplotlib==1.3.1" # Minimum pydicom dependency - python: 3.5 env: - - DEPENDS="numpy==1.10.1 pydicom==0.9.9 pillow==2.6" + - DEPENDS="-r min-requirements.txt pydicom==0.9.9 pillow==2.6" # pydicom master branch - python: 3.5 env: diff --git a/doc/source/installation.rst b/doc/source/installation.rst index c853de9619..22a0e054e0 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -81,13 +81,10 @@ is for you. Requirements ------------ -.. 
check these against: - nibabel/info.py - requirements.txt - .travis.yml +.. check these against setup.cfg -* Python_ 2.7, or >= 3.4 -* NumPy_ 1.8 or greater +* Python_ 3.5 or greater +* NumPy_ 1.10.1 or greater * Six_ 1.3 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) diff --git a/min-requirements.txt b/min-requirements.txt new file mode 100644 index 0000000000..49a90b7d19 --- /dev/null +++ b/min-requirements.txt @@ -0,0 +1,3 @@ +# Auto-generated by tools/update_requirements.py +numpy ==1.10.1 +six ==1.3 diff --git a/requirements.txt b/requirements.txt index fecac9af58..d3dc9acc89 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,3 @@ -# Minumum requirements -# -# Check these against -# nibabel/info.py -# .travis.yml -# doc/source/installation.rst - -six>=1.3 -numpy>=1.10.1 +# Auto-generated by tools/update_requirements.py +numpy >=1.10.1 +six >=1.3 diff --git a/tools/update_requirements.py b/tools/update_requirements.py new file mode 100755 index 0000000000..551424994c --- /dev/null +++ b/tools/update_requirements.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +import sys +from configparser import ConfigParser +from pathlib import Path + +if sys.version_info < (3, 6): + print("This script requires Python 3.6 to work correctly") + sys.exit(1) + +repo_root = Path(__file__).parent.parent +setup_cfg = repo_root / "setup.cfg" +reqs = repo_root / "requirements.txt" +min_reqs = repo_root / "min-requirements.txt" + +config = ConfigParser() +config.read(setup_cfg) +requirements = config.get("options", "install_requires").strip().splitlines() + +script_name = Path(__file__).relative_to(repo_root) + +lines = [f"# Auto-generated by {script_name}", ""] + +# Write requirements +lines[1:-1] = requirements +reqs.write_text("\n".join(lines)) + +# Write minimum requirements +lines[1:-1] = [req.replace(">=", "==").replace("~=", "==") for req in requirements] +min_reqs.write_text("\n".join(lines)) From 5c496aa493a41f88d3f2ee52eefefca7cf5af697 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 10:12:26 -0400 Subject: [PATCH 185/689] FIX: Update minimum six --- min-requirements.txt | 2 +- requirements.txt | 2 +- setup.cfg | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/min-requirements.txt b/min-requirements.txt index 49a90b7d19..63b1a92f2d 100644 --- a/min-requirements.txt +++ b/min-requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py numpy ==1.10.1 -six ==1.3 +six ==1.7 diff --git a/requirements.txt b/requirements.txt index d3dc9acc89..9e9213051c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py numpy >=1.10.1 -six >=1.3 +six >=1.7 diff --git a/setup.cfg b/setup.cfg index e1192b6a2d..03b2aa50c0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -32,7 +32,7 @@ provides = python_requires = >=3.5 install_requires = numpy >=1.10.1 - six >=1.3 + six >=1.7 tests_require = nose >=0.10.1 mock From 21ab6472bdb9ec514242d5e6e844bc4a242bb171 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 12 Aug 2019 10:31:21 -0400 Subject: [PATCH 186/689] FIX: Need numpy 1.12 for fixed assert_warns --- doc/source/installation.rst | 4 ++-- min-requirements.txt | 2 +- requirements.txt | 2 +- setup.cfg | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 22a0e054e0..fa261e726d 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -84,8 +84,8 @@ Requirements .. check these against setup.cfg * Python_ 3.5 or greater -* NumPy_ 1.10.1 or greater -* Six_ 1.3 or greater +* NumPy_ 1.12 or greater +* Six_ 1.7 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) diff --git a/min-requirements.txt b/min-requirements.txt index 63b1a92f2d..a0042b2e49 100644 --- a/min-requirements.txt +++ b/min-requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py -numpy ==1.10.1 +numpy ==1.12 six ==1.7 diff --git a/requirements.txt b/requirements.txt index 9e9213051c..8a1554b73d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py -numpy >=1.10.1 +numpy >=1.12 six >=1.7 diff --git a/setup.cfg b/setup.cfg index 03b2aa50c0..ebf7609909 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ provides = [options] python_requires = >=3.5 install_requires = - numpy >=1.10.1 + numpy >=1.12 six >=1.7 tests_require = nose >=0.10.1 From c1ccbb157e16febdb3f38f7ed90ac77bc1dddc00 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 10:55:30 -0400 Subject: [PATCH 187/689] MAINT: Purge __future__ --- doc/source/scripts/make_coord_examples.py | 1 - doc/tools/build_modref_templates.py | 1 - nibabel/arraywriters.py | 1 - nibabel/benchmarks/bench_array_to_file.py | 1 - nibabel/benchmarks/bench_fileslice.py | 1 - nibabel/benchmarks/bench_finite_range.py | 1 - nibabel/benchmarks/bench_load_save.py | 1 - nibabel/benchmarks/bench_streamlines.py | 1 - nibabel/benchmarks/butils.py | 1 - nibabel/brikhead.py | 1 - nibabel/checkwarns.py | 1 - nibabel/cifti2/cifti2.py | 1 - nibabel/cifti2/parse_cifti2.py | 1 - nibabel/cifti2/tests/test_cifti2io_header.py | 1 - nibabel/cmdline/diff.py | 1 - nibabel/cmdline/ls.py | 1 - nibabel/cmdline/parrec2nii.py | 1 - nibabel/dft.py | 1 - nibabel/externals/netcdf.py | 1 - nibabel/externals/oset.py | 1 - nibabel/externals/six.py | 1 - nibabel/externals/tests/test_netcdf.py | 1 - nibabel/fileslice.py | 1 - nibabel/freesurfer/io.py | 1 - nibabel/freesurfer/tests/test_io.py | 1 - nibabel/gifti/gifti.py | 1 - nibabel/gifti/parse_gifti_fast.py | 1 - nibabel/gifti/tests/test_parse_gifti_fast.py | 1 - nibabel/mriutils.py | 1 - nibabel/nicom/dicomreaders.py | 1 - nibabel/nicom/dicomwrappers.py | 1 - nibabel/nicom/utils.py | 1 - nibabel/nifti1.py | 1 - nibabel/onetime.py | 1 - nibabel/orientations.py | 1 - nibabel/parrec.py | 1 - nibabel/processing.py | 1 - nibabel/rstutils.py | 1 - nibabel/streamlines/array_sequence.py | 1 - nibabel/streamlines/tck.py | 1 - nibabel/streamlines/trk.py | 1 - nibabel/testing/__init__.py | 1 - nibabel/tests/test_api_validators.py | 1 - nibabel/tests/test_arrayproxy.py | 1 - nibabel/tests/test_arraywriters.py | 1 - nibabel/tests/test_brikhead.py | 1 - nibabel/tests/test_data.py | 1 - nibabel/tests/test_diff.py | 1 - nibabel/tests/test_ecat.py | 1 - nibabel/tests/test_ecat_data.py | 1 - nibabel/tests/test_filehandles.py 
| 1 - nibabel/tests/test_funcs.py | 1 - nibabel/tests/test_image_api.py | 1 - nibabel/tests/test_image_load_save.py | 1 - nibabel/tests/test_image_types.py | 1 - nibabel/tests/test_loadsave.py | 1 - nibabel/tests/test_minc1.py | 1 - nibabel/tests/test_minc2.py | 1 - nibabel/tests/test_minc2_data.py | 1 - nibabel/tests/test_mriutils.py | 1 - nibabel/tests/test_nifti1.py | 1 - nibabel/tests/test_nifti2.py | 1 - nibabel/tests/test_parrec_data.py | 1 - nibabel/tests/test_processing.py | 1 - nibabel/tests/test_proxy_api.py | 1 - nibabel/tests/test_rstutils.py | 1 - nibabel/tests/test_scaling.py | 1 - nibabel/tests/test_scripts.py | 1 - nibabel/tests/test_testing.py | 1 - nibabel/tests/test_tmpdirs.py | 1 - nibabel/tests/test_trackvis.py | 1 - nibabel/tests/test_volumeutils.py | 1 - nibabel/tmpdirs.py | 1 - nibabel/trackvis.py | 1 - nibabel/viewers.py | 1 - nibabel/volumeutils.py | 1 - nisext/py3builder.py | 1 - nisext/testers.py | 1 - nisext/tests/test_testers.py | 1 - tools/refresh_readme.py | 1 - 80 files changed, 80 deletions(-) diff --git a/doc/source/scripts/make_coord_examples.py b/doc/source/scripts/make_coord_examples.py index 9079cea141..f763b28c28 100644 --- a/doc/source/scripts/make_coord_examples.py +++ b/doc/source/scripts/make_coord_examples.py @@ -15,7 +15,6 @@ * someones_epi.nii.gz (pretend single EPI volume) * someones_anatomy.nii.gz (pretend single subject structural) """ -from __future__ import division, print_function import math diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 53a8be6ec4..3b988a2135 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -1,7 +1,6 @@ #!/usr/bin/env python """Script to auto-generate our API docs. """ -from __future__ import print_function, division # stdlib imports import sys diff --git a/nibabel/arraywriters.py b/nibabel/arraywriters.py index 2bd29e4ca4..fa2d1e4e1c 100644 --- a/nibabel/arraywriters.py +++ b/nibabel/arraywriters.py @@ -28,7 +28,6 @@ def __init__(self, array, out_dtype=None) something else to make sense of conversions between float and int, or between larger ints and smaller. 
""" -from __future__ import division, absolute_import import warnings diff --git a/nibabel/benchmarks/bench_array_to_file.py b/nibabel/benchmarks/bench_array_to_file.py index 36921a106a..4908848685 100644 --- a/nibabel/benchmarks/bench_array_to_file.py +++ b/nibabel/benchmarks/bench_array_to_file.py @@ -13,7 +13,6 @@ nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_load_save.py """ -from __future__ import division, print_function import sys from io import BytesIO # NOQA diff --git a/nibabel/benchmarks/bench_fileslice.py b/nibabel/benchmarks/bench_fileslice.py index b9568c65a0..764e0390b5 100644 --- a/nibabel/benchmarks/bench_fileslice.py +++ b/nibabel/benchmarks/bench_fileslice.py @@ -11,7 +11,6 @@ nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_fileslice.py """ -from __future__ import division, print_function import sys from timeit import timeit diff --git a/nibabel/benchmarks/bench_finite_range.py b/nibabel/benchmarks/bench_finite_range.py index 5f268eb285..6aa9d9d861 100644 --- a/nibabel/benchmarks/bench_finite_range.py +++ b/nibabel/benchmarks/bench_finite_range.py @@ -13,7 +13,6 @@ nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_finite_range """ -from __future__ import division, print_function import sys diff --git a/nibabel/benchmarks/bench_load_save.py b/nibabel/benchmarks/bench_load_save.py index c2ee68578a..59198eac1a 100644 --- a/nibabel/benchmarks/bench_load_save.py +++ b/nibabel/benchmarks/bench_load_save.py @@ -13,7 +13,6 @@ nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_load_save.py """ -from __future__ import division, print_function import sys diff --git a/nibabel/benchmarks/bench_streamlines.py b/nibabel/benchmarks/bench_streamlines.py index c076657d27..2bd6df2c46 100644 --- a/nibabel/benchmarks/bench_streamlines.py +++ b/nibabel/benchmarks/bench_streamlines.py @@ -13,7 +13,6 @@ nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_streamlines.py """ -from __future__ import division, print_function import numpy as np diff --git a/nibabel/benchmarks/butils.py b/nibabel/benchmarks/butils.py index 36e42f270d..bea5872272 100644 --- a/nibabel/benchmarks/butils.py +++ b/nibabel/benchmarks/butils.py @@ -1,6 +1,5 @@ """ Benchmarking utilities """ -from __future__ import print_function, division from .. import get_info diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 1a12abfbca..ee06e6c119 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -27,7 +27,6 @@ am aware) always be >= 1. This permits sub-brick indexing common in AFNI programs (e.g., example4d+orig'[0]'). 
""" -from __future__ import print_function, division from copy import deepcopy import os diff --git a/nibabel/checkwarns.py b/nibabel/checkwarns.py index deb3f6f009..a5942427b6 100644 --- a/nibabel/checkwarns.py +++ b/nibabel/checkwarns.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Contexts for *with* statement allowing checks for warnings ''' -from __future__ import division, print_function import warnings diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 104c9396cd..8c4f26d5bd 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -16,7 +16,6 @@ http://www.nitrc.org/projects/cifti ''' -from __future__ import division, print_function, absolute_import import re try: from collections.abc import MutableSequence, MutableMapping, Iterable diff --git a/nibabel/cifti2/parse_cifti2.py b/nibabel/cifti2/parse_cifti2.py index 608636a446..8c3d40cd56 100644 --- a/nibabel/cifti2/parse_cifti2.py +++ b/nibabel/cifti2/parse_cifti2.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from distutils.version import LooseVersion diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index e4970625a4..3e3cd9c77d 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from os.path import join as pjoin, dirname import io diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py index 4b8b69381c..fde72431fb 100755 --- a/nibabel/cmdline/diff.py +++ b/nibabel/cmdline/diff.py @@ -10,7 +10,6 @@ """ Quick summary of the differences among a set of neuroimaging files """ -from __future__ import division, print_function, absolute_import import re import sys diff --git a/nibabel/cmdline/ls.py b/nibabel/cmdline/ls.py index f919700247..68bd6ee8c0 100755 --- a/nibabel/cmdline/ls.py +++ b/nibabel/cmdline/ls.py @@ -10,7 +10,6 @@ """ Output a summary table for neuroimaging files (resolution, dimensionality, etc.) """ -from __future__ import division, print_function, absolute_import import sys from optparse import OptionParser, Option diff --git a/nibabel/cmdline/parrec2nii.py b/nibabel/cmdline/parrec2nii.py index 9bbc303848..0dfa03cac9 100644 --- a/nibabel/cmdline/parrec2nii.py +++ b/nibabel/cmdline/parrec2nii.py @@ -1,6 +1,5 @@ """Code for PAR/REC to NIfTI converter command """ -from __future__ import division, print_function, absolute_import from optparse import OptionParser, Option import numpy as np diff --git a/nibabel/dft.py b/nibabel/dft.py index 392856d4c1..8d7da2366d 100644 --- a/nibabel/dft.py +++ b/nibabel/dft.py @@ -10,7 +10,6 @@ """ DICOM filesystem tools """ -from __future__ import division, print_function, absolute_import import os from os.path import join as pjoin diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index e485533cd7..a19ef9bb8a 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -12,7 +12,6 @@ with NetCDF files. """ -from __future__ import division, print_function, absolute_import # TODO: # * properly implement ``_FillValue``. 
diff --git a/nibabel/externals/oset.py b/nibabel/externals/oset.py index 83b1e3e24d..43b131246d 100644 --- a/nibabel/externals/oset.py +++ b/nibabel/externals/oset.py @@ -13,7 +13,6 @@ License: BSD-3 """ -from __future__ import absolute_import try: from collections.abc import MutableSet diff --git a/nibabel/externals/six.py b/nibabel/externals/six.py index 77e656cd67..d93ec84e8d 100644 --- a/nibabel/externals/six.py +++ b/nibabel/externals/six.py @@ -1,6 +1,5 @@ """ Shim allowing some grace time for removal of six.py copy """ # Remove around version 4.0 -from __future__ import absolute_import import warnings diff --git a/nibabel/externals/tests/test_netcdf.py b/nibabel/externals/tests/test_netcdf.py index 679d9d5ff4..289e6791c1 100644 --- a/nibabel/externals/tests/test_netcdf.py +++ b/nibabel/externals/tests/test_netcdf.py @@ -1,5 +1,4 @@ ''' Tests for netcdf ''' -from __future__ import division, print_function, absolute_import import os from os.path import join as pjoin, dirname diff --git a/nibabel/fileslice.py b/nibabel/fileslice.py index e55f48c127..61725d3a5a 100644 --- a/nibabel/fileslice.py +++ b/nibabel/fileslice.py @@ -1,6 +1,5 @@ """ Utilities for getting array slices out of file-like objects """ -from __future__ import division import operator from numbers import Integral diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index 9dda179d1c..8326fb64c6 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -1,6 +1,5 @@ """ Read / write FreeSurfer geometry, morphometry, label, annotation formats """ -from __future__ import division, print_function, absolute_import import warnings import numpy as np diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 51fe123025..fc926ee2af 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -1,4 +1,3 @@ -from __future__ import division, print_function, absolute_import import os from os.path import join as pjoin, isdir import getpass diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 22d6449e9a..5e5cd36c5d 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -11,7 +11,6 @@ The Gifti specification was (at time of writing) available as a PDF download from http://www.nitrc.org/projects/gifti/ """ -from __future__ import division, print_function, absolute_import import sys diff --git a/nibabel/gifti/parse_gifti_fast.py b/nibabel/gifti/parse_gifti_fast.py index f27e0725d6..044a70fede 100644 --- a/nibabel/gifti/parse_gifti_fast.py +++ b/nibabel/gifti/parse_gifti_fast.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import import base64 import sys diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 726779d988..a06180a964 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -6,7 +6,6 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from os.path import join as pjoin, dirname import sys diff --git a/nibabel/mriutils.py b/nibabel/mriutils.py index 0f27544fae..b0f3f6a86f 100644 --- a/nibabel/mriutils.py +++ b/nibabel/mriutils.py @@ -9,7 +9,6 @@ """ Utilities for calculations related to MRI """ -from __future__ import division __all__ = ['calculate_dwell_time'] diff --git a/nibabel/nicom/dicomreaders.py b/nibabel/nicom/dicomreaders.py index d18a43af96..ad8d9c6b64 100644 --- a/nibabel/nicom/dicomreaders.py +++ b/nibabel/nicom/dicomreaders.py @@ -1,4 +1,3 @@ -from __future__ import division, print_function, absolute_import from os.path import join as pjoin import glob diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 194227c6cf..ab80b51402 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -11,7 +11,6 @@ processing that needs to raise an error, should be in a method, rather than in a property, or property-like thing. """ -from __future__ import division import operator diff --git a/nibabel/nicom/utils.py b/nibabel/nicom/utils.py index 66688b801b..7673ada63e 100644 --- a/nibabel/nicom/utils.py +++ b/nibabel/nicom/utils.py @@ -1,6 +1,5 @@ """ Utilities for working with DICOM datasets """ -from __future__ import division, print_function, absolute_import from ..py3k import asstr diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index c2d409e81a..20ff7a909b 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -10,7 +10,6 @@ NIfTI1 format defined at http://nifti.nimh.nih.gov/nifti-1/ ''' -from __future__ import division, print_function import warnings from io import BytesIO from six import string_types diff --git a/nibabel/onetime.py b/nibabel/onetime.py index 1f410b9a1e..a036d0d229 100644 --- a/nibabel/onetime.py +++ b/nibabel/onetime.py @@ -19,7 +19,6 @@ [2] Python data model, https://docs.python.org/reference/datamodel.html """ -from __future__ import division, print_function, absolute_import # ----------------------------------------------------------------------------- # Classes and Functions diff --git a/nibabel/orientations.py b/nibabel/orientations.py index 9f3bbfed4d..ddea3159d0 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Utilities for calculating and applying affine orientations ''' -from __future__ import division, print_function, absolute_import import numpy as np import numpy.linalg as npl diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 1dfa998394..8ed9adf2c5 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -121,7 +121,6 @@ utility via the option "--strict-sort". The dimension info can be exported to a CSV file by adding the option "--volume-info". 
""" -from __future__ import print_function, division import warnings import numpy as np diff --git a/nibabel/processing.py b/nibabel/processing.py index cf9f60c76c..449e6b41fc 100644 --- a/nibabel/processing.py +++ b/nibabel/processing.py @@ -14,7 +14,6 @@ Smoothing and resampling routines need scipy """ -from __future__ import print_function, division, absolute_import import numpy as np import numpy.linalg as npl diff --git a/nibabel/rstutils.py b/nibabel/rstutils.py index 6a330174b1..d0bdb655b0 100644 --- a/nibabel/rstutils.py +++ b/nibabel/rstutils.py @@ -2,7 +2,6 @@ * Make ReST table given array of values """ -from __future__ import division import numpy as np diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 5e6df6bf26..e86cbb5127 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -1,4 +1,3 @@ -from __future__ import division import numbers from operator import mul diff --git a/nibabel/streamlines/tck.py b/nibabel/streamlines/tck.py index 9b1888ebba..4d3a887ce9 100644 --- a/nibabel/streamlines/tck.py +++ b/nibabel/streamlines/tck.py @@ -3,7 +3,6 @@ TCK format is defined at http://mrtrix.readthedocs.io/en/latest/getting_started/image_data.html?highlight=format#tracks-file-format-tck """ -from __future__ import division import os import warnings diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py index 805b44edcf..aba689d7b3 100644 --- a/nibabel/streamlines/trk.py +++ b/nibabel/streamlines/trk.py @@ -1,4 +1,3 @@ -from __future__ import division # Definition of trackvis header structure: # http://www.trackvis.org/docs/?subsect=fileformat diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 16f2112299..74f2c99845 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Utilities for testing ''' -from __future__ import division, print_function import re import os diff --git a/nibabel/tests/test_api_validators.py b/nibabel/tests/test_api_validators.py index affa89d3e3..b47a195291 100644 --- a/nibabel/tests/test_api_validators.py +++ b/nibabel/tests/test_api_validators.py @@ -1,6 +1,5 @@ """ Metaclass and class for validating instance APIs """ -from __future__ import division, print_function, absolute_import from six import with_metaclass diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index 187d5940df..b1cc081b6d 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """ Tests for arrayproxy module """ -from __future__ import division, print_function, absolute_import import warnings import gzip diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index b4a3a48e93..e1f72bc334 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -2,7 +2,6 @@ See docstring of :mod:`nibabel.arraywriters` for API. """ -from __future__ import division, print_function, absolute_import import sys from platform import python_compiler, machine diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index c1632c06c2..a99e6c41b6 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -6,7 +6,6 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from os.path import join as pjoin diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index fbb225838d..641d6e55cd 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: ''' Tests for data module ''' -from __future__ import division, print_function, absolute_import import os from os.path import join as pjoin from os import environ as env diff --git a/nibabel/tests/test_diff.py b/nibabel/tests/test_diff.py index 4f99ca145f..754779dbc9 100644 --- a/nibabel/tests/test_diff.py +++ b/nibabel/tests/test_diff.py @@ -2,7 +2,6 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """ Test diff """ -from __future__ import division, print_function, absolute_import from os.path import (dirname, join as pjoin, abspath) import numpy as np diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 2bfd983c53..9005d32d4f 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import import os import warnings diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index dce96646e8..471bc6b93c 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """ Test we can correctly import example ECAT files """ -from __future__ import print_function, absolute_import import os from os.path import join as pjoin diff --git a/nibabel/tests/test_filehandles.py b/nibabel/tests/test_filehandles.py index 365a418890..1533b7c4f8 100644 --- a/nibabel/tests/test_filehandles.py +++ b/nibabel/tests/test_filehandles.py @@ -1,7 +1,6 @@ """ Check that loading an image does not use up filehandles. """ -from __future__ import division, print_function, absolute_import from os.path import join as pjoin import shutil diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 6032c08672..8a2a7918d8 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Test for image funcs ''' -from __future__ import division, print_function, absolute_import import numpy as np diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ac2a2428c4..a7cfb667e3 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -23,7 +23,6 @@ * ``img.in_memory`` is True for an array image, and for a proxy image that is cached, but False otherwise. 
""" -from __future__ import division, print_function, absolute_import import warnings from functools import partial diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index f7318945e7..7101b6a31b 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Tests for loader function ''' -from __future__ import division, print_function, absolute_import from io import BytesIO import shutil diff --git a/nibabel/tests/test_image_types.py b/nibabel/tests/test_image_types.py index e72ad6bbbc..3ffc65eead 100644 --- a/nibabel/tests/test_image_types.py +++ b/nibabel/tests/test_image_types.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Tests for is_image / may_contain_header functions ''' -from __future__ import division, print_function, absolute_import import copy from os.path import dirname, basename, join as pjoin diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 676c09c121..57464a33a5 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -1,6 +1,5 @@ """ Testing loadsave module """ -from __future__ import print_function from os.path import dirname, join as pjoin import shutil diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index 1c150b02d5..50f4955917 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -6,7 +6,6 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from os.path import join as pjoin diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index c4cb9341ca..d456de0eec 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -6,7 +6,6 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -from __future__ import division, print_function, absolute_import from os.path import join as pjoin diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index 1ec4999a43..0471f87e7e 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """ Test we can correctly import example MINC2_PATH files """ -from __future__ import print_function, absolute_import import os from os.path import join as pjoin diff --git a/nibabel/tests/test_mriutils.py b/nibabel/tests/test_mriutils.py index 6978d9c253..527afc61ba 100644 --- a/nibabel/tests/test_mriutils.py +++ b/nibabel/tests/test_mriutils.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """ Testing mriutils module """ -from __future__ import division from numpy.testing import (assert_almost_equal, diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 931455e75a..1983fb6a9e 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Tests for nifti reading package ''' -from __future__ import division, print_function, absolute_import import os import warnings import struct diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index 8c7afd9ea4..730e30a689 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Tests for nifti2 reading package ''' -from __future__ import division, print_function, absolute_import import os import numpy as np diff --git a/nibabel/tests/test_parrec_data.py b/nibabel/tests/test_parrec_data.py index 630e66cab8..1cbd2923e9 100644 --- a/nibabel/tests/test_parrec_data.py +++ b/nibabel/tests/test_parrec_data.py @@ -1,6 +1,5 @@ """ Test we can correctly import example PARREC files """ -from __future__ import print_function, absolute_import from glob import glob from os.path import join as pjoin, basename, splitext, exists diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index 34b30f14c8..a09bd4cd85 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """ Testing processing module """ -from __future__ import division, print_function from os.path import dirname, join as pjoin diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 7280c5552d..479dd631a4 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -27,7 +27,6 @@ These last are to allow the proxy to be re-used with different images. 
""" -from __future__ import division, print_function, absolute_import from os.path import join as pjoin import warnings diff --git a/nibabel/tests/test_rstutils.py b/nibabel/tests/test_rstutils.py index 9fd708ba64..8952dde235 100644 --- a/nibabel/tests/test_rstutils.py +++ b/nibabel/tests/test_rstutils.py @@ -1,6 +1,5 @@ """ Test printable table """ -from __future__ import division, print_function import sys import numpy as np diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index d318c9f810..019cc58d1c 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Test for scaling / rounding in volumeutils module ''' -from __future__ import division, print_function, absolute_import import numpy as np diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 2c17c33fd1..99e9c546f0 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -4,7 +4,6 @@ Test running scripts """ -from __future__ import division, print_function, absolute_import import sys import os diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 40d5ebc41e..d1c0dbeff6 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -1,6 +1,5 @@ """ Tests for warnings context managers """ -from __future__ import division, print_function, absolute_import import sys import warnings diff --git a/nibabel/tests/test_tmpdirs.py b/nibabel/tests/test_tmpdirs.py index 48fa5885a9..1d35b59269 100644 --- a/nibabel/tests/test_tmpdirs.py +++ b/nibabel/tests/test_tmpdirs.py @@ -1,5 +1,4 @@ """ Test tmpdirs module """ -from __future__ import division, print_function, absolute_import from os import getcwd from os.path import realpath, abspath, dirname, isfile diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 96f96a3f44..076e22f74e 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -1,5 +1,4 @@ ''' Testing trackvis module ''' -from __future__ import division, print_function, absolute_import from functools import partial diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index fcdc5c2713..6eeb6c6e55 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Test for volumeutils module ''' -from __future__ import division import os from os.path import exists diff --git a/nibabel/tmpdirs.py b/nibabel/tmpdirs.py index 8c1b704260..2636d8acb7 100644 --- a/nibabel/tmpdirs.py +++ b/nibabel/tmpdirs.py @@ -8,7 +8,6 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Contexts for *with* statement providing temporary directories ''' -from __future__ import division, print_function, absolute_import import os import shutil from tempfile import template, mkdtemp diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index 233e10ba79..114d96ccec 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -4,7 +4,6 @@ We will deprecate this, the old interface, in some future release. 
""" -from __future__ import division, print_function import warnings import struct import itertools diff --git a/nibabel/viewers.py b/nibabel/viewers.py index 7a0f4d93d7..0cdbdcb815 100644 --- a/nibabel/viewers.py +++ b/nibabel/viewers.py @@ -3,7 +3,6 @@ Includes version of OrthoSlicer3D code originally written by our own Paul Ivanov. """ -from __future__ import division, print_function import numpy as np import weakref diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 95c7af3e45..6cd49cbb8e 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -7,7 +7,6 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## ''' Utility functions for analyze-like formats ''' -from __future__ import division, print_function import sys import warnings diff --git a/nisext/py3builder.py b/nisext/py3builder.py index 9435f6c60b..4f82a8cfb2 100644 --- a/nisext/py3builder.py +++ b/nisext/py3builder.py @@ -1,6 +1,5 @@ """ distutils utilities for porting to python 3 within 2-compatible tree """ -from __future__ import division, print_function, absolute_import import sys import re diff --git a/nisext/testers.py b/nisext/testers.py index a80bbd904b..e0ca4a040a 100644 --- a/nisext/testers.py +++ b/nisext/testers.py @@ -29,7 +29,6 @@ ''' -from __future__ import print_function import os import sys diff --git a/nisext/tests/test_testers.py b/nisext/tests/test_testers.py index 336677b48f..08fa70cd1a 100644 --- a/nisext/tests/test_testers.py +++ b/nisext/tests/test_testers.py @@ -1,6 +1,5 @@ """ Tests for testers """ -from __future__ import division, print_function import os from os.path import dirname, pathsep diff --git a/tools/refresh_readme.py b/tools/refresh_readme.py index 59076442c7..577c10bd36 100755 --- a/tools/refresh_readme.py +++ b/tools/refresh_readme.py @@ -3,7 +3,6 @@ Should be run from nibabel root (containing setup.py) """ -from __future__ import print_function import os import runpy From b5fc1b1143c46cf645b9bc25b7fa0fb6beaaf6d8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 11:15:56 -0400 Subject: [PATCH 188/689] RF: Drop six.*metaclass --- nibabel/cifti2/cifti2_axes.py | 5 ++--- nibabel/streamlines/tractogram_file.py | 5 ++--- nibabel/tests/test_api_validators.py | 4 +--- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 30decec3d1..74f9611521 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -120,7 +120,7 @@ """ import numpy as np from . 
import cifti2 -from six import string_types, add_metaclass, integer_types +from six import string_types, integer_types from operator import xor import abc @@ -173,8 +173,7 @@ def to_header(axes): return cifti2.Cifti2Header(matrix) -@add_metaclass(abc.ABCMeta) -class Axis(object): +class Axis(abc.ABC): """ Abstract class for any object describing the rows or columns of a CIFTI-2 vector/matrix diff --git a/nibabel/streamlines/tractogram_file.py b/nibabel/streamlines/tractogram_file.py index d422560280..f8184c8ba9 100644 --- a/nibabel/streamlines/tractogram_file.py +++ b/nibabel/streamlines/tractogram_file.py @@ -1,7 +1,6 @@ """ Define abstract interface for Tractogram file classes """ -from abc import ABCMeta, abstractmethod -from six import with_metaclass +from abc import ABC, abstractmethod from .header import Field @@ -34,7 +33,7 @@ def __init__(self, callable): super(abstractclassmethod, self).__init__(callable) -class TractogramFile(with_metaclass(ABCMeta)): +class TractogramFile(ABC): """ Convenience class to encapsulate tractogram file format. """ def __init__(self, tractogram, header=None): diff --git a/nibabel/tests/test_api_validators.py b/nibabel/tests/test_api_validators.py index b47a195291..a7cbb8b555 100644 --- a/nibabel/tests/test_api_validators.py +++ b/nibabel/tests/test_api_validators.py @@ -1,8 +1,6 @@ """ Metaclass and class for validating instance APIs """ -from six import with_metaclass - from nose.tools import assert_equal @@ -32,7 +30,7 @@ def meth(self): return klass -class ValidateAPI(with_metaclass(validator2test)): +class ValidateAPI(metaclass=validator2test): """ A class to validate APIs Your job is twofold: From 199e7e0784949f4d6aff6877adf4a65e9159e5a7 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 11:23:06 -0400 Subject: [PATCH 189/689] RF: Drop six.*IO --- nibabel/cmdline/tests/test_utils.py | 2 +- nibabel/tests/test_analyze.py | 2 +- nibabel/tests/test_batteryrunners.py | 2 +- nibabel/tests/test_wrapstruct.py | 3 +-- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/nibabel/cmdline/tests/test_utils.py b/nibabel/cmdline/tests/test_utils.py index e701925870..199eea5d41 100644 --- a/nibabel/cmdline/tests/test_utils.py +++ b/nibabel/cmdline/tests/test_utils.py @@ -15,7 +15,7 @@ from os.path import (join as pjoin) from nibabel.testing import data_path from collections import OrderedDict -from six import StringIO +from io import StringIO def test_table2string(): diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 25ee778db9..5089f6c997 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -20,7 +20,7 @@ import numpy as np -from six import BytesIO, StringIO +from io import BytesIO, StringIO from ..spatialimages import (HeaderDataError, HeaderTypeError, supported_np_types) from ..analyze import AnalyzeHeader, AnalyzeImage diff --git a/nibabel/tests/test_batteryrunners.py b/nibabel/tests/test_batteryrunners.py index 71cbbba072..1130c2f4cb 100644 --- a/nibabel/tests/test_batteryrunners.py +++ b/nibabel/tests/test_batteryrunners.py @@ -9,7 +9,7 @@ ''' Tests for BatteryRunner and Report objects ''' -from six import StringIO +from io import StringIO import logging diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index 5e307067ab..45e8c28a52 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -26,8 +26,7 @@ import logging import numpy as np -from io import BytesIO -from six import StringIO +from io 
import BytesIO, StringIO from ..wrapstruct import WrapStructError, WrapStruct, LabeledWrapStruct from ..batteryrunners import Report From e4b8cfdf9dd3cbc0a0ac9be877f5c5c27c1168f4 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 11:31:48 -0400 Subject: [PATCH 190/689] RF: Drop six.moves --- nibabel/affines.py | 2 +- nibabel/benchmarks/bench_streamlines.py | 1 - nibabel/data.py | 2 +- nibabel/eulerangles.py | 2 +- nibabel/fileslice.py | 2 +- nibabel/freesurfer/io.py | 5 ++--- nibabel/streamlines/tests/test_tractogram.py | 1 - nibabel/testing/__init__.py | 2 +- 8 files changed, 7 insertions(+), 10 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 07154089a1..a7d7a4e9b8 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -5,7 +5,7 @@ import numpy as np -from six.moves import reduce +from functools import reduce class AffineError(ValueError): diff --git a/nibabel/benchmarks/bench_streamlines.py b/nibabel/benchmarks/bench_streamlines.py index 2bd6df2c46..fc1e39f8ad 100644 --- a/nibabel/benchmarks/bench_streamlines.py +++ b/nibabel/benchmarks/bench_streamlines.py @@ -16,7 +16,6 @@ import numpy as np -from six.moves import zip from nibabel.tmpdirs import InTemporaryDirectory from numpy.testing import assert_array_equal diff --git a/nibabel/data.py b/nibabel/data.py index 2a53f15f64..6208ebe7d5 100644 --- a/nibabel/data.py +++ b/nibabel/data.py @@ -8,7 +8,7 @@ from os.path import join as pjoin import glob import sys -from six.moves import configparser +import configparser from distutils.version import LooseVersion from .environment import get_nipy_user_dir, get_nipy_system_dir diff --git a/nibabel/eulerangles.py b/nibabel/eulerangles.py index eac1c046ed..0928cd39d3 100644 --- a/nibabel/eulerangles.py +++ b/nibabel/eulerangles.py @@ -85,7 +85,7 @@ import math -from six.moves import reduce +from functools import reduce import numpy as np diff --git a/nibabel/fileslice.py b/nibabel/fileslice.py index 61725d3a5a..af410a7e22 100644 --- a/nibabel/fileslice.py +++ b/nibabel/fileslice.py @@ -5,7 +5,7 @@ from numbers import Integral from mmap import mmap -from six.moves import reduce +from functools import reduce import numpy as np diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index 8326fb64c6..a48ba2c324 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -7,7 +7,6 @@ import time from collections import OrderedDict -from six.moves import xrange from ..openers import Opener @@ -431,7 +430,7 @@ def _read_annot_ctab_old_format(fobj, n_entries): orig_tab = orig_tab[:-1] names = list() ctab = np.zeros((n_entries, 5), dt) - for i in xrange(n_entries): + for i in range(n_entries): # structure name length + string name_length = np.fromfile(fobj, dt, 1)[0] name = np.fromfile(fobj, "|S%d" % name_length, 1)[0] @@ -482,7 +481,7 @@ def _read_annot_ctab_new_format(fobj, ctab_version): # number of LUT entries present in the file entries_to_read = np.fromfile(fobj, dt, 1)[0] names = list() - for _ in xrange(entries_to_read): + for _ in range(entries_to_read): # index of this entry idx = np.fromfile(fobj, dt, 1)[0] # structure name length + string diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 0fe83a26d7..888de0bd49 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -10,7 +10,6 @@ from nibabel.testing import clear_and_catch_warnings from nose.tools import assert_equal, assert_raises, assert_true from 
numpy.testing import assert_array_equal, assert_array_almost_equal -from six.moves import zip from .. import tractogram as module_tractogram from ..tractogram import is_data_dict, is_lazy_dict diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 74f2c99845..010e4d0ad1 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -29,7 +29,7 @@ except ImportError: pass -from six.moves import zip_longest +from itertools import zip_longest # set path to example data data_path = abspath(pjoin(dirname(__file__), '..', 'tests', 'data')) From f3ff932ad78421cb30ead6e75df683b70808a65c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 11:40:48 -0400 Subject: [PATCH 191/689] RF: Drop six.*_types --- nibabel/brikhead.py | 3 +-- nibabel/cifti2/cifti2_axes.py | 23 +++++++++++------------ nibabel/externals/netcdf.py | 6 ++---- nibabel/filebasedimages.py | 3 +-- nibabel/gifti/tests/test_gifti.py | 5 ++--- nibabel/nifti1.py | 3 +-- nibabel/optpkg.py | 3 +-- nibabel/streamlines/__init__.py | 3 +-- nibabel/tests/test_image_api.py | 3 +-- nibabel/tests/test_nifti1.py | 14 ++++++-------- nibabel/tests/test_proxy_api.py | 3 +-- 11 files changed, 28 insertions(+), 41 deletions(-) diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index ee06e6c119..49182ba705 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -33,7 +33,6 @@ import re import numpy as np -from six import string_types from .arrayproxy import ArrayProxy from .fileslice import strided_scalar @@ -203,7 +202,7 @@ def parse_AFNI_header(fobj): [1, 1, 1] """ # edge case for being fed a filename instead of a file object - if isinstance(fobj, string_types): + if isinstance(fobj, str): with open(fobj, 'rt') as src: return parse_AFNI_header(src) # unpack variables in HEAD file diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 74f9611521..05ab84e6ab 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -120,7 +120,6 @@ """ import numpy as np from . 
import cifti2 -from six import string_types, integer_types from operator import xor import abc @@ -288,7 +287,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, else: self.vertex = np.asanyarray(vertex, dtype=int) - if isinstance(name, string_types): + if isinstance(name, str): name = [self.to_cifti_brain_structure_name(name)] * self.vertex.size self.name = np.asanyarray(name, dtype='U') @@ -504,7 +503,7 @@ def to_cifti_brain_structure_name(name): """ if name in cifti2.CIFTI_BRAIN_STRUCTURES: return name - if not isinstance(name, string_types): + if not isinstance(name, str): if len(name) == 1: structure = name[0] orientation = 'both' @@ -588,7 +587,7 @@ def volume_shape(self, value): value = tuple(value) if len(value) != 3: raise ValueError("Volume shape should be a tuple of length 3") - if not all(isinstance(v, integer_types) for v in value): + if not all(isinstance(v, int) for v in value): raise ValueError("All elements of the volume shape should be integers") self._volume_shape = value @@ -678,9 +677,9 @@ def __getitem__(self, item): Otherwise returns a new BrainModelAxis """ - if isinstance(item, integer_types): + if isinstance(item, int): return self.get_element(item) - if isinstance(item, string_types): + if isinstance(item, str): raise IndexError("Can not index an Axis with a string (except for ParcelsAxis)") return self.__class__(self.name[item], self.voxel[item], self.vertex[item], self.affine, self.volume_shape, self.nvertices) @@ -913,7 +912,7 @@ def volume_shape(self, value): value = tuple(value) if len(value) != 3: raise ValueError("Volume shape should be a tuple of length 3") - if not all(isinstance(v, integer_types) for v in value): + if not all(isinstance(v, int) for v in value): raise ValueError("All elements of the volume shape should be integers") self._volume_shape = value @@ -988,14 +987,14 @@ def __getitem__(self, item): - `string`: 2-element tuple of (parcel voxels, parcel vertices - other object that can index 1D arrays: new Parcel axis """ - if isinstance(item, string_types): + if isinstance(item, str): idx = np.where(self.name == item)[0] if len(idx) == 0: raise IndexError("Parcel %s not found" % item) if len(idx) > 1: raise IndexError("Multiple parcels with name %s found" % item) return self.voxels[idx[0]], self.vertices[idx[0]] - if isinstance(item, integer_types): + if isinstance(item, int): return self.get_element(item) return self.__class__(self.name[item], self.voxels[item], self.vertices[item], self.affine, self.volume_shape, self.nvertices) @@ -1124,7 +1123,7 @@ def __add__(self, other): ) def __getitem__(self, item): - if isinstance(item, integer_types): + if isinstance(item, int): return self.get_element(item) return self.__class__(self.name[item], self.meta[item]) @@ -1269,7 +1268,7 @@ def __add__(self, other): ) def __getitem__(self, item): - if isinstance(item, integer_types): + if isinstance(item, int): return self.get_element(item) return self.__class__(self.name[item], self.label[item], self.meta[item]) @@ -1436,7 +1435,7 @@ def __getitem__(self, item): nelements = 0 return SeriesAxis(idx_start * self.step + self.start, self.step * step, nelements, self.unit) - elif isinstance(item, integer_types): + elif isinstance(item, int): return self.get_element(item) raise IndexError('SeriesAxis can only be indexed with integers or slices ' 'without breaking the regular structure') diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index a19ef9bb8a..d665e33e82 100644 --- a/nibabel/externals/netcdf.py +++ 
b/nibabel/externals/netcdf.py @@ -40,8 +40,6 @@ from numpy import little_endian as LITTLE_ENDIAN from functools import reduce -from six import integer_types - ABSENT = b'\x00\x00\x00\x00\x00\x00\x00\x00' ZERO = b'\x00\x00\x00\x00' @@ -479,8 +477,8 @@ def _write_values(self, values): if hasattr(values, 'dtype'): nc_type = REVERSE[values.dtype.char, values.dtype.itemsize] else: - types = [(t, NC_INT) for t in integer_types] - types += [ + types = [ + (int, NC_INT), (float, NC_FLOAT), (str, NC_CHAR), ] diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index c17701bc2e..64b79550e3 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -10,7 +10,6 @@ import io from copy import deepcopy -from six import string_types from .fileholders import FileHolder from .filename_parser import (types_filenames, TypesFilenamesError, splitext_addext) @@ -374,7 +373,7 @@ def make_file_map(klass, mapping=None): for key, ext in klass.files_types: file_map[key] = FileHolder() mapval = mapping.get(key, None) - if isinstance(mapval, string_types): + if isinstance(mapval, str): file_map[key].filename = mapval elif hasattr(mapval, 'tell'): file_map[key].fileobj = mapval diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index f1285d441d..d6367b30c8 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -7,7 +7,6 @@ import numpy as np import nibabel as nib -from six import string_types from nibabel.gifti import (GiftiImage, GiftiDataArray, GiftiLabel, GiftiLabelTable, GiftiMetaData, GiftiNVPairs, GiftiCoordSystem) @@ -176,11 +175,11 @@ def test_to_xml_open_close_deprecations(): da = GiftiDataArray(np.ones((1,)), 'triangle') with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) - assert_true(isinstance(da.to_xml_open(), string_types)) + assert_true(isinstance(da.to_xml_open(), str)) assert_equal(len(w), 1) with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) - assert_true(isinstance(da.to_xml_close(), string_types)) + assert_true(isinstance(da.to_xml_close(), str)) assert_equal(len(w), 1) diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 20ff7a909b..8031417085 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -12,7 +12,6 @@ ''' import warnings from io import BytesIO -from six import string_types import numpy as np import numpy.linalg as npl @@ -1389,7 +1388,7 @@ def set_intent(self, code, params=(), name='', allow_unknown=False): if not known_intent: # We can set intent via an unknown integer code, but can't via an # unknown string label - if not allow_unknown or isinstance(code, string_types): + if not allow_unknown or isinstance(code, str): raise KeyError('Unknown intent code: ' + str(code)) if known_intent: icode = intent_codes.code[code] diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py index fb0e00179a..d52329f186 100644 --- a/nibabel/optpkg.py +++ b/nibabel/optpkg.py @@ -1,7 +1,6 @@ """ Routines to support optional packages """ import pkgutil from distutils.version import LooseVersion -from six import string_types from .tripwire import TripWire if pkgutil.find_loader('nose'): @@ -12,7 +11,7 @@ def _check_pkg_version(pkg, min_version): # Default version checking function - if isinstance(min_version, string_types): + if isinstance(min_version, str): min_version = LooseVersion(min_version) try: return min_version <= pkg.__version__ diff --git a/nibabel/streamlines/__init__.py 
b/nibabel/streamlines/__init__.py index 84d810367e..7f999ca19b 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -2,7 +2,6 @@ """ import os import warnings -from six import string_types from .header import Field from .array_sequence import ArraySequence @@ -57,7 +56,7 @@ def detect_format(fileobj): except IOError: pass - if isinstance(fileobj, string_types): + if isinstance(fileobj, str): _, ext = os.path.splitext(fileobj) return FORMATS.get(ext.lower()) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index a7cfb667e3..979b8777f9 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -27,7 +27,6 @@ import warnings from functools import partial from itertools import product -from six import string_types import numpy as np @@ -577,7 +576,7 @@ def validate_path_maybe_image(self, imaker, params): assert_true(isinstance(test, bool)) if sniff is not None: assert isinstance(sniff[0], bytes) - assert isinstance(sniff[1], string_types) + assert isinstance(sniff[1], str) class MakeImageAPI(LoadImageAPI): diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 1983fb6a9e..38863e9aa2 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -11,8 +11,6 @@ import warnings import struct -import six - import numpy as np from nibabel import nifti1 as nifti1 @@ -942,16 +940,16 @@ def test_set_sform(self): def test_sqform_code_type(self): # make sure get_s/qform returns codes as integers img = self.image_class(np.zeros((2, 3, 4)), None) - assert isinstance(img.get_sform(coded=True)[1], six.integer_types) - assert isinstance(img.get_qform(coded=True)[1], six.integer_types) + assert isinstance(img.get_sform(coded=True)[1], int) + assert isinstance(img.get_qform(coded=True)[1], int) img.set_sform(None, 3) img.set_qform(None, 3) - assert isinstance(img.get_sform(coded=True)[1], six.integer_types) - assert isinstance(img.get_qform(coded=True)[1], six.integer_types) + assert isinstance(img.get_sform(coded=True)[1], int) + assert isinstance(img.get_qform(coded=True)[1], int) img.set_sform(None, 2.0) img.set_qform(None, 4.0) - assert isinstance(img.get_sform(coded=True)[1], six.integer_types) - assert isinstance(img.get_qform(coded=True)[1], six.integer_types) + assert isinstance(img.get_sform(coded=True)[1], int) + assert isinstance(img.get_qform(coded=True)[1], int) img.set_sform(None, img.get_sform(coded=True)[1]) img.set_qform(None, img.get_qform(coded=True)[1]) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 479dd631a4..f1a1248c61 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -35,7 +35,6 @@ import numpy as np -from six import string_types from ..volumeutils import apply_read_scaling from ..analyze import AnalyzeHeader from ..spm99analyze import Spm99AnalyzeHeader @@ -152,7 +151,7 @@ def validate_header_isolated(self, pmaker, params): def validate_fileobj_isolated(self, pmaker, params): # Check file position of read independent of file-like object prox, fio, hdr = pmaker() - if isinstance(fio, string_types): + if isinstance(fio, str): return assert_array_equal(prox, params['arr_out']) fio.read() # move to end of file From 541d1d66a5b98bae266dcdbc8edd6a0201e9957c Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 12 Aug 2019 12:05:35 -0400 Subject: [PATCH 192/689] MAINT: Drop nibabel.externals.six --- COPYING | 31 --------------------------- nibabel/externals/__init__.py | 3 --- nibabel/externals/six.py | 11 ---------- nibabel/externals/tests/test_six.py | 33 ----------------------------- 4 files changed, 78 deletions(-) delete mode 100644 nibabel/externals/six.py delete mode 100644 nibabel/externals/tests/test_six.py diff --git a/COPYING b/COPYING index 8511235733..aadf96e90c 100644 --- a/COPYING +++ b/COPYING @@ -218,34 +218,3 @@ the PDDL version 1.0 available at http://opendatacommons.org/licenses/pddl/1.0/ is courtesy of the University of Massachusetts Medical School, also released under the PDDL. - - -Six --------------------- - -In ``nibabel/externals/six.py`` - -Copied from: https://pypi.python.org/packages/source/s/six/six-1.3.0.tar.gz#md5=ec47fe6070a8a64c802363d2c2b1e2ee - -:: - - Copyright (c) 2010-2013 Benjamin Peterson - - Permission is hereby granted, free of charge, to any person obtaining a copy of - this software and associated documentation files (the "Software"), to deal in - the Software without restriction, including without limitation the rights to - use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of - the Software, and to permit persons to whom the Software is furnished to do so, - subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS - FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR - COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER - IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN - CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - diff --git a/nibabel/externals/__init__.py b/nibabel/externals/__init__.py index 4c31772bb5..0eefb918c9 100644 --- a/nibabel/externals/__init__.py +++ b/nibabel/externals/__init__.py @@ -1,5 +1,2 @@ # init for externals package from collections import OrderedDict - -from ..deprecated import ModuleProxy as _ModuleProxy -six = _ModuleProxy('nibabel.externals.six') diff --git a/nibabel/externals/six.py b/nibabel/externals/six.py deleted file mode 100644 index d93ec84e8d..0000000000 --- a/nibabel/externals/six.py +++ /dev/null @@ -1,11 +0,0 @@ -""" Shim allowing some grace time for removal of six.py copy """ -# Remove around version 4.0 - -import warnings - -warnings.warn("We no longer carry a copy of the 'six' package in nibabel; " - "Please import the 'six' package directly", - FutureWarning, - stacklevel=2) - -from six import * # noqa diff --git a/nibabel/externals/tests/test_six.py b/nibabel/externals/tests/test_six.py deleted file mode 100644 index 35db2ca851..0000000000 --- a/nibabel/externals/tests/test_six.py +++ /dev/null @@ -1,33 +0,0 @@ -""" Test we are deprecating externals.six import -""" - -import warnings -import types - -from nose.tools import assert_true, assert_equal - -from nibabel.deprecated import ModuleProxy - - -def test_old_namespace(): - with warnings.catch_warnings(record=True) as warns: - # Top level import. - # This import does not trigger an import of the six.py module, because - # it's the proxy object. 
- from nibabel.externals import six - assert_equal(warns, []) - # If there was a previous import it will be module, otherwise it will be - # a proxy. - previous_import = isinstance(six, types.ModuleType) - if not previous_import: - assert_true(isinstance(six, ModuleProxy)) - shim_BytesIO = six.BytesIO # just to check it works - # There may or may not be a warning raised on accessing the proxy, - # depending on whether the externals.six.py module is already imported - # in this test run. - if not previous_import: - assert_equal(warns.pop(0).category, FutureWarning) - from six import BytesIO - assert_equal(warns, []) - # The import from old module is the same as that from new - assert_true(shim_BytesIO is BytesIO) From 66ed14e417d0c03facb6f7e58fbcff07436d4726 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 12:07:25 -0400 Subject: [PATCH 193/689] MAINT: Drop six dependency --- doc/source/installation.rst | 1 - min-requirements.txt | 1 - requirements.txt | 1 - setup.cfg | 1 - 4 files changed, 4 deletions(-) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index fa261e726d..5165fa18d0 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -85,7 +85,6 @@ Requirements * Python_ 3.5 or greater * NumPy_ 1.12 or greater -* Six_ 1.7 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) diff --git a/min-requirements.txt b/min-requirements.txt index a0042b2e49..ed4ed75bf2 100644 --- a/min-requirements.txt +++ b/min-requirements.txt @@ -1,3 +1,2 @@ # Auto-generated by tools/update_requirements.py numpy ==1.12 -six ==1.7 diff --git a/requirements.txt b/requirements.txt index 8a1554b73d..365f19556b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ # Auto-generated by tools/update_requirements.py numpy >=1.12 -six >=1.7 diff --git a/setup.cfg b/setup.cfg index ebf7609909..5d00b4507d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -32,7 +32,6 @@ provides = python_requires = >=3.5 install_requires = numpy >=1.12 - six >=1.7 tests_require = nose >=0.10.1 mock From 3303a52d5924bb63801703ac853243b084ba6803 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 12:40:45 -0400 Subject: [PATCH 194/689] RF: Drop code explicitly tagged as PY2 --- nibabel/cifti2/cifti2.py | 6 +----- nibabel/externals/oset.py | 6 +----- nibabel/streamlines/tractogram.py | 7 +------ nibabel/tests/test_fileslice.py | 13 ------------- nibabel/tests/test_floating.py | 7 ------- 5 files changed, 3 insertions(+), 36 deletions(-) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 8c4f26d5bd..1a5307eba5 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -17,11 +17,7 @@ http://www.nitrc.org/projects/cifti ''' import re -try: - from collections.abc import MutableSequence, MutableMapping, Iterable -except ImportError: - # PY2 compatibility - from collections import MutableSequence, MutableMapping, Iterable +from collections.abc import MutableSequence, MutableMapping, Iterable from collections import OrderedDict from .. 
import xmlutils as xml from ..filebasedimages import FileBasedHeader diff --git a/nibabel/externals/oset.py b/nibabel/externals/oset.py index 43b131246d..0a29c661c5 100644 --- a/nibabel/externals/oset.py +++ b/nibabel/externals/oset.py @@ -14,11 +14,7 @@ """ -try: - from collections.abc import MutableSet -except ImportError: - # PY2 compatibility - from collections import MutableSet +from collections.abc import MutableSet KEY, PREV, NEXT = range(3) diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 209ed27c26..11d72ac78a 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -2,12 +2,7 @@ import numbers import numpy as np from warnings import warn - -try: - from collections.abc import MutableMapping -except ImportError: - # PY2 compatibility - from collections import MutableMapping +from collections.abc import MutableMapping from nibabel.affines import apply_affine diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index e9cfe8e0a4..2b804195ef 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -2,8 +2,6 @@ import sys -PY2 = sys.version_info[0] < 3 - from io import BytesIO from itertools import product from functools import partial @@ -250,9 +248,6 @@ def test_threshold_heuristic(): assert_equal(threshold_heuristic(1, 9, 1, skip_thresh=7), None) assert_equal(threshold_heuristic(1, 9, 2, skip_thresh=16), 'full') assert_equal(threshold_heuristic(1, 9, 2, skip_thresh=15), None) - # long if on Python 2 - if PY2: - assert_equal(threshold_heuristic(long(1), 9, 1, skip_thresh=8), 'full') # full slice, smallest step size assert_equal(threshold_heuristic( slice(0, 9, 1), 9, 2, skip_thresh=2), @@ -515,10 +510,6 @@ def test_optimize_read_slicers(): assert_equal(optimize_read_slicers( (1, 2, 3), (2, 3, 4), 4, _always), ((sn, sn, 3), (1, 2))) - if PY2: # Check we can pass in longs as well - assert_equal(optimize_read_slicers( - (long(1), long(2), long(3)), (2, 3, 4), 4, _always), - ((sn, sn, 3), (1, 2))) def test_slicers2segments(): @@ -540,10 +531,6 @@ def test_slicers2segments(): assert_equal(slicers2segments( (slice(None), slice(None), 2), (10, 6, 4), 7, 4), [[7 + 10 * 6 * 2 * 4, 10 * 6 * 4]]) - if PY2: # Check we can pass longs on Python 2 - assert_equal( - slicers2segments((long(0), long(1), long(2)), (10, 6, 4), 7, 4), - [[7 + 10 * 4 + 10 * 6 * 2 * 4, 4]]) def test_calc_slicedefs(): diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index 96376270b1..7e0f14702a 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -2,8 +2,6 @@ """ import sys -PY2 = sys.version_info[0] < 3 - from distutils.version import LooseVersion import numpy as np @@ -186,11 +184,6 @@ def test_int_to_float(): i = 2**(nmant + 1) - 1 assert_equal(as_int(int_to_float(i, LD)), i) assert_equal(as_int(int_to_float(-i, LD)), -i) - # Test no error for longs - if PY2: - i = long(i) - assert_equal(as_int(int_to_float(i, LD)), i) - assert_equal(as_int(int_to_float(-i, LD)), -i) # If longdouble can cope with 2**64, test if nmant >= 63: # Check conversion to int; the line below causes an error subtracting From 33862e4845ef16515c9844608ac4c6c918d079a3 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 12 Aug 2019 12:55:31 -0400 Subject: [PATCH 195/689] RF: Drop python < 3.5.1 hacks in openers --- nibabel/openers.py | 61 +++------------------------------------------- setup.cfg | 4 +-- 2 files changed, 4 insertions(+), 61 deletions(-) diff --git a/nibabel/openers.py b/nibabel/openers.py index e551404561..199f9d22a5 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -10,10 +10,7 @@ """ import sys -if sys.version_info[0] < 3: - from bz2file import BZ2File -else: - from bz2 import BZ2File +from bz2 import BZ2File import gzip import sys import warnings @@ -43,51 +40,6 @@ HAVE_INDEXED_GZIP = False -# The largest memory chunk that gzip can use for reads -GZIP_MAX_READ_CHUNK = 100 * 1024 * 1024 # 100Mb - - -class BufferedGzipFile(gzip.GzipFile): - """GzipFile able to readinto buffer >= 2**32 bytes. - - This class only differs from gzip.GzipFile - in Python 3.5.0. - - This works around a known issue in Python 3.5. - See https://bugs.python.org/issue25626 - """ - - # This helps avoid defining readinto in Python 2.6, - # where it is undefined on gzip.GzipFile. - # It also helps limit the exposure to this code. - if sys.version_info[:3] == (3, 5, 0): - def __init__(self, fileish, mode='rb', compresslevel=9, - buffer_size=2**32 - 1): - super(BufferedGzipFile, self).__init__(fileish, mode=mode, - compresslevel=compresslevel) - self.buffer_size = buffer_size - - def readinto(self, buf): - """Uses self.buffer_size to do a buffered read.""" - n_bytes = len(buf) - if n_bytes < 2 ** 32: - return super(BufferedGzipFile, self).readinto(buf) - - # This works around a known issue in Python 3.5. - # See https://bugs.python.org/issue25626 - mv = memoryview(buf) - n_read = 0 - max_read = 2 ** 32 - 1 # Max for unsigned 32-bit integer - while (n_read < n_bytes): - n_wanted = min(n_bytes - n_read, max_read) - n_got = super(BufferedGzipFile, self).readinto( - mv[n_read:n_read + n_wanted]) - n_read += n_got - if n_got != n_wanted: - break - return n_read - - def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): # use indexed_gzip if possible for faster read access. If keep_open == @@ -96,16 +48,9 @@ def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): if HAVE_INDEXED_GZIP and mode == 'rb': gzip_file = IndexedGzipFile(filename, drop_handles=not keep_open) - # Fall-back to built-in GzipFile (wrapped with the BufferedGzipFile class - # defined above) + # Fall-back to built-in GzipFile else: - gzip_file = BufferedGzipFile(filename, mode, compresslevel) - - # Speedup for #209, for versions of python < 3.5. Open gzip files with - # faster reads on large files using a larger read buffer. 
See - # https://github.com/nipy/nibabel/pull/210 for discussion - if hasattr(gzip_file, 'max_read_chunk'): - gzip_file.max_read_chunk = GZIP_MAX_READ_CHUNK + gzip_file = gzip.GzipFile(filename, mode, compresslevel) return gzip_file diff --git a/setup.cfg b/setup.cfg index 5d00b4507d..7ba13660eb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,8 +13,6 @@ classifiers = License :: OSI Approved :: MIT License Operating System :: OS Independent Programming Language :: Python - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 @@ -29,7 +27,7 @@ provides = nisext [options] -python_requires = >=3.5 +python_requires = >=3.5.1 install_requires = numpy >=1.12 tests_require = From fdcb7f4b315e4623356bc24b1968d3fdc7cbdd71 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:17:20 -0400 Subject: [PATCH 196/689] TEST: Duplicate assertion --- nibabel/tests/test_testing.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index d1c0dbeff6..f770ac4b0e 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -13,18 +13,6 @@ get_fresh_mod, assert_re_in) -def assert_warn_len_equal(mod, n_in_context): - mod_warns = mod.__warningregistry__ - # Python 3.4 appears to clear any pre-existing warnings of the same type, - # when raising warnings inside a catch_warnings block. So, there is a - # warning generated by the tests within the context manager, but no - # previous warnings. - if 'version' in mod_warns: - assert_equal(len(mod_warns), 2) # including 'version' - else: - assert_equal(len(mod_warns), n_in_context) - - def test_assert_allclose_safely(): # Test the safe version of allclose assert_allclose_safely([1, 1], [1, 1]) From 013741f10a64bd282c19e71e2ae7f5b9425dc905 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:26:21 -0400 Subject: [PATCH 197/689] DOC: Update docstrings without Py2/3 discussion --- nibabel/fileutils.py | 5 ++--- nibabel/freesurfer/io.py | 2 +- nibabel/gifti/gifti.py | 7 +------ nibabel/tests/scriptrunner.py | 4 ++-- nibabel/trackvis.py | 2 +- nibabel/volumeutils.py | 4 ++-- 6 files changed, 9 insertions(+), 15 deletions(-) diff --git a/nibabel/fileutils.py b/nibabel/fileutils.py index be9c214616..b88e2f7128 100644 --- a/nibabel/fileutils.py +++ b/nibabel/fileutils.py @@ -23,9 +23,8 @@ def read_zt_byte_strings(fobj, n_strings=1, bufsize=1024): Parameters ---------- f : fileobj - File object to use. Should implement ``read``, returning byte objects - (str in Python 2), and ``seek(n, 1)`` to seek from current file - position. + File object to use. Should implement ``read``, returning byte objects, + and ``seek(n, 1)`` to seek from current file position. n_strings : int, optional Number of byte strings to return bufsize: int, optional diff --git a/nibabel/freesurfer/io.py b/nibabel/freesurfer/io.py index a48ba2c324..f8d2442662 100644 --- a/nibabel/freesurfer/io.py +++ b/nibabel/freesurfer/io.py @@ -355,7 +355,7 @@ def read_annot(filepath, orig_ids=False): to any label and orig_ids=False, its id will be set to -1. ctab : ndarray, shape (n_labels, 5) RGBT + label id colortable array. - names : list of str (python 2), list of bytes (python 3) + names : list of bytes The names of the labels. The length of the list is n_labels. 
""" with open(filepath, "rb") as fobj: diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 5e5cd36c5d..95b0c4133a 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -13,8 +13,8 @@ """ import sys - import numpy as np +import base64 from .. import xmlutils as xml from ..filebasedimages import SerializableImage @@ -23,11 +23,6 @@ gifti_endian_codes, KIND2FMT) from ..deprecated import deprecate_with_version -# {en,de}codestring in deprecated in Python3, but -# {en,de}codebytes not available in Python2. -# Therefore set the proper functions depending on the Python version. -import base64 - class GiftiMetaData(xml.XmlSerializable): """ A sequence of GiftiNVPairs containing metadata for a gifti data array diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index c5b37df80f..ea1daeabf5 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -112,9 +112,9 @@ def run_command(self, cmd, check_code=True): ------- returncode : int return code from execution of `cmd` - stdout : bytes (python 3) or str (python 2) + stdout : bytes stdout from `cmd` - stderr : bytes (python 3) or str (python 2) + stderr : bytes stderr from `cmd` """ if isinstance(cmd, string_types): diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index 114d96ccec..5077b739ac 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -325,7 +325,7 @@ def write(fileobj, streamlines, hdr_mapping=None, endianness=None, >>> pts1 = np.random.uniform(size=(10,3)) >>> streamlines = ([(pts0, None, None), (pts1, None, None)]) >>> write(file_obj, streamlines) - >>> _ = file_obj.seek(0) # returns 0 in python 3 + >>> _ = file_obj.seek(0) # returns 0 >>> streams, hdr = read(file_obj) >>> len(streams) 2 diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 6cd49cbb8e..2cc083ecb6 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -479,7 +479,7 @@ def array_from_file(shape, in_dtype, infile, offset=0, order='F', mmap=True): >>> from io import BytesIO >>> bio = BytesIO() >>> arr = np.arange(6).reshape(1,2,3) - >>> _ = bio.write(arr.tostring('F')) # outputs int in python3 + >>> _ = bio.write(arr.tostring('F')) # outputs int >>> arr2 = array_from_file((1,2,3), arr.dtype, bio) >>> np.all(arr == arr2) True @@ -609,7 +609,7 @@ def array_to_file(data, fileobj, out_dtype=None, offset=0, >>> array_to_file(data, sio, np.float) >>> sio.getvalue() == data.tostring('F') True - >>> _ = sio.truncate(0); _ = sio.seek(0) # outputs 0 in python 3 + >>> _ = sio.truncate(0); _ = sio.seek(0) # outputs 0 >>> array_to_file(data, sio, np.int16) >>> sio.getvalue() == data.astype(np.int16).tostring() True From 34a7caffa75a0aff006abea482b906756d440336 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:27:05 -0400 Subject: [PATCH 198/689] RF: Remove Python < 3.5 hacks --- nibabel/dft.py | 4 +-- nibabel/nicom/tests/test_csareader.py | 2 -- nibabel/tests/scriptrunner.py | 18 ++------------ nibabel/tests/test_arraywriters.py | 35 ++++++--------------------- nibabel/tests/test_rstutils.py | 5 ---- nibabel/trackvis.py | 11 ++------- 6 files changed, 13 insertions(+), 62 deletions(-) diff --git a/nibabel/dft.py b/nibabel/dft.py index 8d7da2366d..7b39d35b81 100644 --- a/nibabel/dft.py +++ b/nibabel/dft.py @@ -277,9 +277,7 @@ def __exit__(self, type, value, traceback): def _get_subdirs(base_dir, files_dict=None, followlinks=False): dirs = [] - # followlinks keyword not available for python 2.5. 
- kwargs = {} if not followlinks else {'followlinks': True} - for (dirpath, dirnames, filenames) in os.walk(base_dir, **kwargs): + for (dirpath, dirnames, filenames) in os.walk(base_dir, followlinks=followlinks): abs_dir = os.path.realpath(dirpath) if abs_dir in dirs: raise CachingError('link cycle detected under %s' % base_dir) diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index 592dd2ba54..a6bf589e90 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -130,8 +130,6 @@ def test_ice_dims(): @dicom_test -@skipif(sys.version_info < (2,7) and pydicom.__version__ < '1.0', - 'Known issue for python 2.6 and pydicom < 1.0') def test_missing_csa_elem(): # Test that we get None instead of raising an Exception when the file has # the PrivateCreator element for the CSA dict but not the element with the diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index ea1daeabf5..33b5e3dcef 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -18,22 +18,8 @@ from subprocess import Popen, PIPE -try: # Python 2 - string_types = basestring, -except NameError: # Python 3 - string_types = str, - -def _get_package(): - """ Workaround for missing ``__package__`` in Python 3.2 - """ - if '__package__' in globals() and not __package__ is None: - return __package__ - return __name__.split('.', 1)[0] - - -# Same as __package__ for Python 2.6, 2.7 and >= 3.3 -MY_PACKAGE = _get_package() +MY_PACKAGE = __package__ def local_script_dir(script_sdir): @@ -117,7 +103,7 @@ def run_command(self, cmd, check_code=True): stderr : bytes stderr from `cmd` """ - if isinstance(cmd, string_types): + if isinstance(cmd, str): cmd = [cmd] else: cmd = list(cmd) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index e1f72bc334..d5547e875f 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -3,9 +3,7 @@ See docstring of :mod:`nibabel.arraywriters` for API. 
""" -import sys from platform import python_compiler, machine -from distutils.version import LooseVersion import itertools import numpy as np @@ -33,8 +31,6 @@ IUINT_TYPES = INT_TYPES + UINT_TYPES NUMERIC_TYPES = CFLOAT_TYPES + IUINT_TYPES -NP_VERSION = LooseVersion(np.__version__) - def round_trip(writer, order='F', apply_scale=True): sio = BytesIO() @@ -65,29 +61,14 @@ def test_arraywriters(): assert_array_equal(arr, round_trip(aw)) # Byteswapped should be OK bs_arr = arr.byteswap().newbyteorder('S') - # Except on some numpies for complex256, where the array does not - # equal itself - if not np.all(bs_arr == arr): - assert_true(NP_VERSION <= LooseVersion('1.7.0')) - assert_true(on_powerpc()) - assert_true(type == np.complex256) - else: - bs_aw = klass(bs_arr) - bs_aw_rt = round_trip(bs_aw) - # On Ubuntu 13.04 with python 3.3 __eq__ comparison on - # arrays with complex numbers fails here for some - # reason -- not our fault, and to test correct operation we - # will just compare element by element - if NP_VERSION == '1.7.1' and sys.version_info[:2] == (3, 3): - assert_array_equal_ = lambda x, y: np.all([x_ == y_ for x_, y_ in zip(x, y)]) - else: - assert_array_equal_ = assert_array_equal - # assert against original array because POWER7 was running into - # trouble using the byteswapped array (bs_arr) - assert_array_equal_(arr, bs_aw_rt) - bs_aw2 = klass(bs_arr, arr.dtype) - bs_aw2_rt = round_trip(bs_aw2) - assert_array_equal(arr, bs_aw2_rt) + bs_aw = klass(bs_arr) + bs_aw_rt = round_trip(bs_aw) + # assert against original array because POWER7 was running into + # trouble using the byteswapped array (bs_arr) + assert_array_equal(arr, bs_aw_rt) + bs_aw2 = klass(bs_arr, arr.dtype) + bs_aw2_rt = round_trip(bs_aw2) + assert_array_equal(arr, bs_aw2_rt) # 2D array arr2 = np.reshape(arr, (2, 5)) a2w = klass(arr2) diff --git a/nibabel/tests/test_rstutils.py b/nibabel/tests/test_rstutils.py index 8952dde235..51103c45ca 100644 --- a/nibabel/tests/test_rstutils.py +++ b/nibabel/tests/test_rstutils.py @@ -1,12 +1,10 @@ """ Test printable table """ -import sys import numpy as np from ..rstutils import rst_table -from nose import SkipTest from nose.tools import assert_equal, assert_raises @@ -14,9 +12,6 @@ def test_rst_table(): # Tests for printable table function R, C = 3, 4 cell_values = np.arange(R * C).reshape((R, C)) - if (sys.version_info[:3] == (3, 2, 3) and np.__version__ == '1.6.1'): - raise SkipTest("Known (later fixed) bug in python3.2/numpy " - "treating np.int64 as str") assert_equal(rst_table(cell_values), """+--------+--------+--------+--------+--------+ | | col[0] | col[1] | col[2] | col[3] | diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index 5077b739ac..c1fa6d367b 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -18,11 +18,6 @@ from .affines import apply_affine from .deprecated import deprecate_with_version -try: - basestring -except NameError: # python 3 - basestring = str - warnings.warn("The trackvis interface has been deprecated and will be removed " "in v4.0; please use the 'nibabel.streamlines' interface.", DeprecationWarning, @@ -831,15 +826,13 @@ def __init__(self, @classmethod def from_file(klass, file_like, points_space=None): streamlines, header = read(file_like, points_space=points_space) - filename = (file_like if isinstance(file_like, basestring) - else None) + filename = file_like if isinstance(file_like, str) else None return klass(streamlines, header, None, filename, points_space) def to_file(self, file_like): write(file_like, self.streamlines, 
self.header, self.endianness, points_space=self.points_space) - self.filename = (file_like if isinstance(file_like, basestring) - else None) + self.filename = file_like if isinstance(file_like, str) else None def get_affine(self, atleast_v2=True): """ Get affine from header in object From 16a0c8fb79a24bdf3bb1eb71e6ef3de12e7fc6dd Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:29:40 -0400 Subject: [PATCH 199/689] TEST: Drop BZ2 mio error checks (Python 3.3 only) --- nibabel/tests/test_analyze.py | 6 +----- nibabel/tests/test_helpers.py | 27 --------------------------- nibabel/tests/test_spm99analyze.py | 4 +--- 3 files changed, 2 insertions(+), 35 deletions(-) diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 5089f6c997..45a4c00d62 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -698,8 +698,6 @@ class TestAnalyzeImage(tsi.TestSpatialImage, tsi.MmapImageMixin): image_class = AnalyzeImage can_save = True supported_np_types = TestAnalyzeHeader.supported_np_types - # Flag to skip bz2 save tests if they are going to break - bad_bz2 = False def test_supported_types(self): img = self.image_class(np.zeros((2, 3, 4)), np.eye(4)) @@ -794,9 +792,7 @@ def test_big_offset_exts(self): arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) aff = np.eye(4) img_ext = img_klass.files_types[0][1] - compressed_exts = ['', '.gz'] - if not self.bad_bz2: - compressed_exts.append('.bz2') + compressed_exts = ['', '.gz', '.bz2'] with InTemporaryDirectory(): for offset in (0, 2048): # Set offset in in-memory image diff --git a/nibabel/tests/test_helpers.py b/nibabel/tests/test_helpers.py index 7b05a4d666..928b4bd1a3 100644 --- a/nibabel/tests/test_helpers.py +++ b/nibabel/tests/test_helpers.py @@ -31,33 +31,6 @@ def bytesio_round_trip(img): return klass.from_file_map(bytes_map) -def bz2_mio_error(): - """ Return True if writing mat 4 file fails - - Writing an empty string can fail for bz2 objects in python 3.3: - - https://bugs.python.org/issue16828 - - This in turn causes scipy to give this error when trying to write bz2 mat - files. - - This won't cause a problem for scipy releases after Jan 24 2014 because of - commit 98ef522d99 (in scipy) - """ - if not have_scipy: - return True - import scipy.io - - with InTemporaryDirectory(): - with ImageOpener('test.mat.bz2', 'wb') as fobj: - try: - scipy.io.savemat(fobj, {'a': 1}, format='4') - except ValueError: - return True - else: - return False - - def assert_data_similar(arr, params): """ Check data is the same if recorded, otherwise check summaries diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 5ee94e98c2..137d3b0451 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -31,7 +31,7 @@ from ..testing import assert_allclose_safely, suppress_warnings from . 
import test_analyze -from .test_helpers import (bytesio_round_trip, bytesio_filemap, bz2_mio_error) +from .test_helpers import bytesio_round_trip, bytesio_filemap FLOAT_TYPES = np.sctypes['float'] COMPLEX_TYPES = np.sctypes['complex'] @@ -404,8 +404,6 @@ def test_nan2zero_range_ok(self): class TestSpm99AnalyzeImage(test_analyze.TestAnalyzeImage, ImageScalingMixin): # class for testing images image_class = Spm99AnalyzeImage - # Flag to skip bz2 save tests if they are going to break - bad_bz2 = bz2_mio_error() # Decorating the old way, before the team invented @ test_data_hdr_cache = (scipy_skip( From e39872cdedcd0804b464cc12022b0867f32090cd Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:31:44 -0400 Subject: [PATCH 200/689] RF: Use builtin FileNotFoundError --- nibabel/loadsave.py | 1 - nibabel/tests/test_loadsave.py | 1 - 2 files changed, 2 deletions(-) diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 8c3041e73c..cd1efbe3d7 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -17,7 +17,6 @@ from .filebasedimages import ImageFileError from .imageclasses import all_image_classes from .arrayproxy import is_proxy -from .py3k import FileNotFoundError from .deprecated import deprecate_with_version diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 57464a33a5..4c1c703389 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -21,7 +21,6 @@ from nose.tools import (assert_true, assert_false, assert_raises, assert_equal, assert_not_equal) -from ..py3k import FileNotFoundError data_path = pjoin(dirname(__file__), 'data') From a407bf30bd3a55a49e236ffbf06776f48bcfefa8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 13:33:05 -0400 Subject: [PATCH 201/689] STY: Unused import --- nibabel/openers.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/nibabel/openers.py b/nibabel/openers.py index 199f9d22a5..e0706c4998 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -9,10 +9,8 @@ """ Context manager openers for various fileobject types """ -import sys from bz2 import BZ2File import gzip -import sys import warnings from os.path import splitext from distutils.version import StrictVersion From 9b437137b0afee9f86131f59f5d96f95cf8d4445 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 12 Aug 2019 13:58:59 -0400 Subject: [PATCH 202/689] RF: Import from numpy.compat.py3k when needed --- nibabel/cmdline/utils.py | 4 +- nibabel/externals/netcdf.py | 2 +- nibabel/nicom/utils.py | 2 +- nibabel/nifti1.py | 2 +- nibabel/py3k.py | 93 ++----------------- nibabel/streamlines/tck.py | 2 +- nibabel/streamlines/tests/test_streamlines.py | 2 +- nibabel/streamlines/trk.py | 3 +- nibabel/tests/test_openers.py | 2 +- nibabel/tests/test_optpkg.py | 2 +- nibabel/trackvis.py | 2 +- 11 files changed, 18 insertions(+), 98 deletions(-) diff --git a/nibabel/cmdline/utils.py b/nibabel/cmdline/utils.py index e14c860274..57c0ccc286 100644 --- a/nibabel/cmdline/utils.py +++ b/nibabel/cmdline/utils.py @@ -18,8 +18,6 @@ import numpy as np -from nibabel.py3k import asunicode - verbose_level = 0 @@ -100,7 +98,7 @@ def table2string(table, out=None): string_ += "%%%ds%%s%%%ds " \ % (nspacesl, nspacesr) % ('', item, '') string += string_.rstrip() + '\n' - out.write(asunicode(string)) + out.write(string) if print2string: value = out.getvalue() diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index d665e33e82..aca62aac80 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -35,7 +35,7 @@ from mmap import mmap, ACCESS_READ import numpy as np # noqa -from ..py3k import asbytes, asstr +from numpy.compat.py3k import asbytes, asstr from numpy import frombuffer, ndarray, dtype, empty, array, asarray from numpy import little_endian as LITTLE_ENDIAN from functools import reduce diff --git a/nibabel/nicom/utils.py b/nibabel/nicom/utils.py index 7673ada63e..f1d5810775 100644 --- a/nibabel/nicom/utils.py +++ b/nibabel/nicom/utils.py @@ -1,7 +1,7 @@ """ Utilities for working with DICOM datasets """ -from ..py3k import asstr +from numpy.compat.py3k import asstr def find_private_section(dcm_data, group_no, creator): diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 8031417085..3979f5b96c 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -15,8 +15,8 @@ import numpy as np import numpy.linalg as npl +from numpy.compat.py3k import asstr -from .py3k import asstr from .filebasedimages import SerializableImage from .volumeutils import Recoder, make_dt_codes, endian_codes from .spatialimages import HeaderDataError, ImageFileError diff --git a/nibabel/py3k.py b/nibabel/py3k.py index bd55158d30..02dd1f16e7 100644 --- a/nibabel/py3k.py +++ b/nibabel/py3k.py @@ -1,88 +1,9 @@ -""" -Python 3 compatibility tools. +import warnings -Copied from numpy/compat/py3k. +warnings.warn("We no longer carry a copy of the 'py3k' module in nibabel; " + "Please import from the 'numpy.compat.py3k' module directly. " + "Full removal scheduled for nibabel 4.0.", + FutureWarning, + stacklevel=2) -Please prefer the routines in the six module when possible. 
- -BSD license -""" - -__all__ = ['bytes', 'asbytes', 'isfileobj', 'getexception', 'strchar', - 'unicode', 'asunicode', 'asbytes_nested', 'asunicode_nested', - 'asstr', 'open_latin1', 'StringIO', 'BytesIO'] - -import sys - -if sys.version_info[0] >= 3: - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - bytes = bytes - unicode = str - asunicode = str - - def asbytes(s): - if isinstance(s, bytes): - return s - return s.encode('latin1') - - def asstr(s): - if isinstance(s, str): - return s - return s.decode('latin1') - - def isfileobj(f): - return isinstance(f, io.FileIO) - - def open_latin1(filename, mode='r'): - return open(filename, mode=mode, encoding='iso-8859-1') - strchar = 'U' - ints2bytes = lambda seq: bytes(seq) - ZEROB = bytes([0]) - FileNotFoundError = FileNotFoundError - import builtins -else: - import StringIO - StringIO = BytesIO = StringIO.StringIO - bytes = str - unicode = unicode - asbytes = str - asstr = str - strchar = 'S' - - def isfileobj(f): - return isinstance(f, file) - - def asunicode(s): - if isinstance(s, unicode): - return s - return s.decode('ascii') - - def open_latin1(filename, mode='r'): - return open(filename, mode=mode) - ints2bytes = lambda seq: ''.join(chr(i) for i in seq) - ZEROB = chr(0) - - class FileNotFoundError(IOError): - pass - - import __builtin__ as builtins # noqa - - -def getexception(): - return sys.exc_info()[1] - - -def asbytes_nested(x): - if hasattr(x, '__iter__') and not isinstance(x, (bytes, unicode)): - return [asbytes_nested(y) for y in x] - else: - return asbytes(x) - - -def asunicode_nested(x): - if hasattr(x, '__iter__') and not isinstance(x, (bytes, unicode)): - return [asunicode_nested(y) for y in x] - else: - return asunicode(x) +from numpy.compat.py3k import * # noqa diff --git a/nibabel/streamlines/tck.py b/nibabel/streamlines/tck.py index 4d3a887ce9..ffcd2e437a 100644 --- a/nibabel/streamlines/tck.py +++ b/nibabel/streamlines/tck.py @@ -8,9 +8,9 @@ import warnings import numpy as np +from numpy.compat.py3k import asbytes, asstr from nibabel.openers import Opener -from nibabel.py3k import asbytes, asstr from .array_sequence import ArraySequence from .tractogram_file import TractogramFile diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 90a18f5acf..2f96e56843 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -8,7 +8,7 @@ import nibabel as nib from io import BytesIO from nibabel.tmpdirs import InTemporaryDirectory -from nibabel.py3k import asbytes +from numpy.compat.py3k import asbytes from nibabel.testing import data_path from nibabel.testing import clear_and_catch_warnings diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py index aba689d7b3..f67ab1509a 100644 --- a/nibabel/streamlines/trk.py +++ b/nibabel/streamlines/trk.py @@ -8,10 +8,11 @@ import warnings import numpy as np +from numpy.compat.py3k import asstr + import nibabel as nib from nibabel.openers import Opener -from nibabel.py3k import asstr from nibabel.volumeutils import (native_code, swapped_code, endian_codes) from nibabel.orientations import (aff2axcodes, axcodes2ornt) diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index 6aeb66aaf7..69704eaeb1 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -13,7 +13,7 @@ from io import BytesIO, UnsupportedOperation from distutils.version import StrictVersion -from ..py3k import asstr, asbytes +from 
numpy.compat.py3k import asstr, asbytes from ..openers import Opener, ImageOpener, HAVE_INDEXED_GZIP, BZ2File from ..tmpdirs import InTemporaryDirectory from ..volumeutils import BinOpener diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index c0930e848a..99f90b5de6 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -4,6 +4,7 @@ import mock import types import sys +import builtins from distutils.version import LooseVersion from nose import SkipTest @@ -11,7 +12,6 @@ assert_equal) -from nibabel.py3k import builtins from nibabel.optpkg import optional_package from nibabel.tripwire import TripWire, TripWireError diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index c1fa6d367b..691ad7b537 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -10,8 +10,8 @@ import numpy as np import numpy.linalg as npl +from numpy.compat.py3k import asstr -from .py3k import asstr from .volumeutils import (native_code, swapped_code, endian_codes, rec2dict) from .openers import ImageOpener from .orientations import aff2axcodes From d6442c14fd9e51c9e6ccbc3d4982fb6e0f9e9593 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 12 Aug 2019 14:11:44 -0400 Subject: [PATCH 203/689] TEST: Minimum numpy >= 1.12 --- nibabel/tests/test_fileslice.py | 5 +---- nibabel/tests/test_floating.py | 4 +--- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index 2b804195ef..19735200eb 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -11,9 +11,6 @@ import numpy as np -# np > 1.11 makes double ellipsis illegal in indices -HAVE_NP_GT_1p11 = LooseVersion(np.__version__) > '1.11' - from ..fileslice import (is_fancy, canonical_slicers, fileslice, predict_shape, read_segments, _positive_slice, threshold_heuristic, optimize_slicer, slice2len, @@ -46,7 +43,7 @@ def test_is_fancy(): _check_slice(slice0) _check_slice((slice0,)) # tuple is same # Double ellipsis illegal in np 1.12dev - set up check for that case - maybe_bad = HAVE_NP_GT_1p11 and slice0 is Ellipsis + maybe_bad = slice0 is Ellipsis for slice1 in slices: if maybe_bad and slice1 is Ellipsis: continue diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index 7e0f14702a..d9401db156 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -108,9 +108,7 @@ def test_check_nmant_nexp(): assert_true(_check_nmant(t, ti['nmant'])) # Test fails for longdouble after blacklisting of OSX powl as of numpy # 1.12 - see https://github.com/numpy/numpy/issues/8307 - if (t != np.longdouble or - sys.platform != 'darwin' or - LooseVersion(np.__version__) < LooseVersion('1.12')): + if t != np.longdouble or sys.platform != 'darwin': assert_true(_check_maxexp(t, ti['maxexp'])) From 6c66c6a5f09de88e2ac58bfbdd3e29051c12729d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 15 Aug 2019 06:30:55 -0400 Subject: [PATCH 204/689] DOC: Minimum Python == 3.5.1 --- doc/source/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 5165fa18d0..2dab695e80 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -83,7 +83,7 @@ Requirements .. 
check these against setup.cfg -* Python_ 3.5 or greater +* Python_ 3.5.1 or greater * NumPy_ 1.12 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) From 8dd3419e250b6c3b08f5a6b4f1fac2106b581a80 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 25 May 2019 21:03:02 -0400 Subject: [PATCH 205/689] MAINT: Set up versioneer --- .gitattributes | 1 + MANIFEST.in | 2 + nibabel/__init__.py | 6 +- nibabel/_version.py | 520 ++++++++++++ setup.cfg | 7 + setup.py | 12 +- versioneer.py | 1822 +++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 2360 insertions(+), 10 deletions(-) create mode 100644 nibabel/_version.py create mode 100644 versioneer.py diff --git a/.gitattributes b/.gitattributes index d32a3d189c..54f60617d5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,2 @@ nibabel/COMMIT_INFO.txt export-subst +nibabel/_version.py export-subst diff --git a/MANIFEST.in b/MANIFEST.in index 439af883cd..526e478f39 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -10,3 +10,5 @@ recursive-include nibabel/externals/tests/data * recursive-include nibabel/nicom/tests/data * recursive-include nibabel/gifti/tests/data * include nibabel/COMMIT_INFO.txt +include versioneer.py +include nibabel/_version.py diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 20fdad3469..a599b2e31f 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -9,7 +9,11 @@ import os -from .info import __version__, long_description as __doc__ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions + +from .info import long_description as __doc__ __doc__ += """ Quickstart ========== diff --git a/nibabel/_version.py b/nibabel/_version.py new file mode 100644 index 0000000000..674e0d4e41 --- /dev/null +++ b/nibabel/_version.py @@ -0,0 +1,520 @@ + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
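+    # Illustrative note (not upstream text): in a `git archive` export with
+    # export-subst enabled, git rewrites these placeholders in place, so the
+    # lines end up looking something like (hypothetical values):
+    #   git_refnames = " (tag: 3.0.0)"
+    #   git_full = "0123456789abcdef0123456789abcdef01234567"
+    #   git_date = "2019-08-15 06:30:55 -0400"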
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "" + cfg.parentdir_prefix = "" + cfg.versionfile_source = "nibabel/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
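+    # Illustrative note (not upstream text): in a plain checkout the target
+    # line reads `git_full = "$Format:%H$"` (unexpanded), while an exported
+    # tarball carries the real values; either way the regexps below capture
+    # whatever sits between the double quotes.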
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
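+
+    As a hypothetical example, a describe output of "1.2-14-g1a2b3c4-dirty"
+    parses into closest-tag "1.2", distance 14, short hash "1a2b3c4", and
+    dirty True.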
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags.
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
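+        # For example, with this project's versionfile_source of
+        # "nibabel/_version.py" (two path components), the loop below strips
+        # two levels: .../nibabel/_version.py -> .../nibabel -> ... (root)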
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/setup.cfg b/setup.cfg index 7ba13660eb..ebfeb72f39 100644 --- a/setup.cfg +++ b/setup.cfg @@ -75,3 +75,10 @@ exclude = nibabel/externals/* */__init__.py +[versioneer] +VCS = git +style = pep440 +versionfile_source = nibabel/_version.py +versionfile_build = nibabel/_version.py +tag_prefix = +parentdir_prefix = diff --git a/setup.py b/setup.py index 3fb24126a8..9c281a032c 100755 --- a/setup.py +++ b/setup.py @@ -13,13 +13,7 @@ import os from setuptools import setup - -# nisext is nipy setup extensions, which we're mostly moving away from -# get_comrec_build stores the current commit in COMMIT_HASH.txt at build time -# read_vars_from evaluates a python file and makes variables available -from nisext.sexts import get_comrec_build, read_vars_from - -INFO = read_vars_from(os.path.join('nibabel', 'info.py')) +import versioneer # Give setuptools a hint to complain if it's too old a version # 30.3.0 allows us to put most metadata in setup.cfg @@ -30,6 +24,6 @@ if __name__ == "__main__": setup(name='nibabel', - version=INFO.VERSION, setup_requires=SETUP_REQUIRES, - cmdclass={'build_py': get_comrec_build('nibabel')}) + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass()) diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 0000000000..64fea1c892 --- /dev/null +++ b/versioneer.py @@ -0,0 +1,1822 @@ + +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. 
+
+
+## Quick Install
+
+* `pip install versioneer` to somewhere on your $PATH
+* add a `[versioneer]` section to your setup.cfg (see below)
+* run `versioneer install` in your source tree, commit the results
+
+## Version Identifiers
+
+Source trees come from a variety of places:
+
+* a version-control system checkout (mostly used by developers)
+* a nightly tarball, produced by build automation
+* a snapshot tarball, produced by a web-based VCS browser, like github's
+  "tarball from tag" feature
+* a release tarball, produced by "setup.py sdist", distributed through PyPI
+
+Within each source tree, the version identifier (either a string or a number,
+this tool is format-agnostic) can come from a variety of places:
+
+* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
+  about recent "tags" and an absolute revision-id
+* the name of the directory into which the tarball was unpacked
+* an expanded VCS keyword ($Id$, etc)
+* a `_version.py` created by some earlier build step
+
+For released software, the version identifier is closely related to a VCS
+tag. Some projects use tag names that include more than just the version
+string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
+needs to strip the tag prefix to extract the version identifier. For
+unreleased software (between tags), the version identifier should provide
+enough information to help developers recreate the same tree, while also
+giving them an idea of roughly how old the tree is (after version 1.2, before
+version 1.3). Many VCS systems can report a description that captures this,
+for example `git describe --tags --dirty --always` reports things like
+"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
+0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
+uncommitted changes).
+
+The version identifier is used for multiple purposes:
+
+* to allow the module to self-identify its version: `myproject.__version__`
+* to choose a name and prefix for a 'setup.py sdist' tarball
+
+## Theory of Operation
+
+Versioneer works by adding a special `_version.py` file into your source
+tree, where your `__init__.py` can import it. This `_version.py` knows how to
+dynamically ask the VCS tool for version information at import time.
+
+`_version.py` also contains `$Revision$` markers, and the installation
+process marks `_version.py` to have this marker rewritten with a tag name
+during the `git archive` command. As a result, generated tarballs will
+contain enough information to get the proper version.
+
+To allow `setup.py` to compute a version too, a `versioneer.py` is added to
+the top level of your source tree, next to `setup.py` and the `setup.cfg`
+that configures it. This overrides several distutils/setuptools commands to
+compute the version when invoked, and changes `setup.py build` and `setup.py
+sdist` to replace `_version.py` with a small static file that contains just
+the generated version data.
+
+## Installation
+
+See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
+
+## Version-String Flavors
+
+Code which uses Versioneer can learn about its version string at runtime by
+importing `_version` from your main `__init__.py` file and running the
+`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
+import the top-level `versioneer.py` and run `get_versions()`.
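+
+As an illustrative sketch (values are hypothetical, echoing the examples
+used later in this section):
+
+    >>> import versioneer
+    >>> versioneer.get_versions()
+    {'version': '0.11+2.g1076c97.dirty',
+     'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
+     'dirty': True, 'error': None, 'date': '2015-09-07T10:18:22-0400'}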
+ +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/warner/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). 
There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+  [Buildbot](https://github.com/buildbot/buildbot), which contains both
+  "master" and "slave" subprojects, each with their own `setup.py`,
+  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
+  distributions (and upload multiple independently-installable tarballs).
+* Source trees whose main purpose is to contain a C library, but which also
+  provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+
+Versioneer-0.16 and earlier only looked for a `.git` directory next to the
+`setup.cfg`, so subprojects were completely unsupported with those releases.
+
+### Editable installs with setuptools <= 18.5
+
+`setup.py develop` and `pip install --editable .` allow you to install a
+project into a virtualenv once, then continue editing the source code (and
+test) without re-installing after every change.
+
+"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
+convenient way to specify executable scripts that should be installed along
+with the python package.
+
+These both work as expected when using modern setuptools. When using
+setuptools-18.5 or earlier, however, certain operations will cause
+`pkg_resources.DistributionNotFound` errors when running the entrypoint
+script, which must be resolved by re-installing the package. This happens
+when the install happens with one version, then the egg_info data is
+regenerated while a different version is checked out. Many setup.py commands
+cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
+a different virtualenv), so this can be surprising.
+
+[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+this one, but upgrading to a newer version of setuptools should probably
+resolve it.
+
+### Unicode version strings
+
+While Versioneer works (and is continually tested) with both Python 2 and
+Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
+Newer releases probably generate unicode version strings on py2. It's not
+clear that this is wrong, but it may be surprising for applications when they
+write these strings to a network connection or include them in bytes-oriented
+APIs like cryptographic checksums.
+
+[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
+this question.
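+
+A minimal defensive sketch (not part of Versioneer itself) for callers that
+need bytes on both Python 2 and 3:
+
+    version = get_versions()['version']
+    if not isinstance(version, bytes):
+        version = version.encode('ascii')  # PEP 440 strings are ASCII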
+
+
+## Updating Versioneer
+
+To upgrade your project to a new release of Versioneer, do the following:
+
+* install the new Versioneer (`pip install -U versioneer` or equivalent)
+* edit `setup.cfg`, if necessary, to include any new configuration settings
+  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install` in your source tree, to replace
+  `SRC/_version.py`
+* commit any changed files
+
+## Future Directions
+
+This tool is designed to be easily extended to other version-control
+systems: all VCS-specific components are in separate directories like
+src/git/ . The top-level `versioneer.py` script is assembled from these
+components by running make-versioneer.py . In the future, make-versioneer.py
+will take a VCS name as an argument, and will construct a version of
+`versioneer.py` that is specific to the given VCS. It might also take the
+configuration arguments that are currently provided manually during
+installation by editing setup.py . Alternatively, it might go the other
+direction and include code from all supported VCS systems, reducing the
+number of intermediate scripts.
+
+
+## License
+
+To make Versioneer easier to embed, all its code is dedicated to the public
+domain. The `_version.py` that it creates is also in the public domain.
+Specifically, both are released under the Creative Commons "Public Domain
+Dedication" license (CC0-1.0), as described in
+https://creativecommons.org/publicdomain/zero/1.0/ .
+
+"""
+
+from __future__ import print_function
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+
+class VersioneerConfig:
+    """Container for Versioneer configuration parameters."""
+
+
+def get_root():
+    """Get the project root directory.
+
+    We require that all commands are run from the project root, i.e. the
+    directory that contains setup.py, setup.cfg, and versioneer.py .
+    """
+    root = os.path.realpath(os.path.abspath(os.getcwd()))
+    setup_py = os.path.join(root, "setup.py")
+    versioneer_py = os.path.join(root, "versioneer.py")
+    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+        # allow 'python path/to/setup.py COMMAND'
+        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
+        setup_py = os.path.join(root, "setup.py")
+        versioneer_py = os.path.join(root, "versioneer.py")
+    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+        err = ("Versioneer was unable to find the project root directory. "
+               "Versioneer requires setup.py to be executed from "
+               "its immediate directory (like 'python setup.py COMMAND'), "
+               "or in a way that lets it use sys.argv[0] to find the root "
+               "(like 'python path/to/setup.py COMMAND').")
+        raise VersioneerBadRootError(err)
+    try:
+        # Certain runtime workflows (setup.py install/develop in a setuptools
+        # tree) execute all dependencies in a single python process, so
+        # "versioneer" may be imported multiple times, and python's shared
+        # module-import table will cache the first one. So we can't use
+        # os.path.dirname(__file__), as that will find whichever
+        # versioneer.py was first imported, even in later projects.
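+        # Illustrative note (not upstream text): if two projects in one
+        # process each bundle a versioneer.py, sys.modules keeps whichever
+        # was imported first, so __file__ here can point at the other
+        # project's copy; the comparison below warns when that happens.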
+ me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . + setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+
+import json
+
+version_json = '''
+%s
+'''  # END VERSION_JSON
+
+
+def get_versions():
+    return json.loads(version_json)
+"""
+
+
+def versions_from_file(filename):
+    """Try to determine the version from _version.py if present."""
+    try:
+        with open(filename) as f:
+            contents = f.read()
+    except EnvironmentError:
+        raise NotThisMethod("unable to read _version.py")
+    mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
+                   contents, re.M | re.S)
+    if not mo:
+        mo = re.search(r"version_json = '''\r\n(.*)'''  # END VERSION_JSON",
+                       contents, re.M | re.S)
+    if not mo:
+        raise NotThisMethod("no version_json in _version.py")
+    return json.loads(mo.group(1))
+
+
+def write_to_version_file(filename, versions):
+    """Write the given version number to the given _version.py file."""
+    os.unlink(filename)
+    contents = json.dumps(versions, sort_keys=True,
+                          indent=1, separators=(",", ": "))
+    with open(filename, "w") as f:
+        f.write(SHORT_VERSION_PY % contents)
+
+    print("set %s to '%s'" % (filename, versions["version"]))
+
+
+def plus_or_dot(pieces):
+    """Return a + if we don't already have one, else return a ."""
+    if "+" in pieces.get("closest-tag", ""):
+        return "."
+    return "+"
+
+
+def render_pep440(pieces):
+    """Build up version string, with post-release "local version identifier".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces, style):
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {"version": "unknown",
+                "full-revisionid": pieces.get("long"),
+                "dirty": None,
+                "error": pieces["error"],
+                "date": None}
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {"version": rendered, "full-revisionid": pieces["long"],
+            "dirty": pieces["dirty"], "error": None,
+            "date": pieces.get("date")}
+
+
+class VersioneerBadRootError(Exception):
+    """The project root directory is unknown or missing key files."""
+
+
+def get_versions(verbose=False):
+    """Get the project version from whatever source is available.
+
+    Returns dict with two keys: 'version' and 'full'.
+    """
+    if "versioneer" in sys.modules:
+        # see the discussion in cmdclass.py:get_cmdclass()
+        del sys.modules["versioneer"]
+
+    root = get_root()
+    cfg = get_config_from_root(root)
+
+    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
+    handlers = HANDLERS.get(cfg.VCS)
+    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
+    verbose = verbose or cfg.verbose
+    assert cfg.versionfile_source is not None, \
+        "please set versioneer.versionfile_source"
+    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
+
+    versionfile_abs = os.path.join(root, cfg.versionfile_source)
+
+    # extract version from first of: _version.py, VCS command (e.g. 'git
+    # describe'), parentdir. This is meant to work for developers using a
+    # source checkout, for users of a tarball created by 'setup.py sdist',
+    # and for users of a tarball/zipball created by 'git archive' or github's
+    # download-from-tag feature or the equivalent in other VCSes.
+
+    get_keywords_f = handlers.get("get_keywords")
+    from_keywords_f = handlers.get("keywords")
+    if get_keywords_f and from_keywords_f:
+        try:
+            keywords = get_keywords_f(versionfile_abs)
+            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
+            if verbose:
+                print("got version from expanded keyword %s" % ver)
+            return ver
+        except NotThisMethod:
+            pass
+
+    try:
+        ver = versions_from_file(versionfile_abs)
+        if verbose:
+            print("got version from file %s %s" % (versionfile_abs, ver))
+        return ver
+    except NotThisMethod:
+        pass
+
+    from_vcs_f = handlers.get("pieces_from_vcs")
+    if from_vcs_f:
+        try:
+            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
+            ver = render(pieces, cfg.style)
+            if verbose:
+                print("got version from VCS %s" % ver)
+            return ver
+        except NotThisMethod:
+            pass
+
+    try:
+        if cfg.parentdir_prefix:
+            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+            if verbose:
+                print("got version from parentdir %s" % ver)
+            return ver
+    except NotThisMethod:
+        pass
+
+    if verbose:
+        print("unable to compute version")
+
+    return {"version": "0+unknown", "full-revisionid": None,
+            "dirty": None, "error": "unable to compute version",
+            "date": None}
+
+
+def get_version():
+    """Get the short version string for this project."""
+    return get_versions()["version"]
+
+
+def get_cmdclass():
+    """Get the custom setuptools/distutils subclasses used by Versioneer."""
+    if "versioneer" in sys.modules:
+        del sys.modules["versioneer"]
+        # this fixes the "python setup.py develop" case (also 'install' and
+        # 'easy_install .'), in which subdependencies of the main project are
+        # built (using setup.py bdist_egg) in the same python process. Assume
+        # a main project A and a dependency B, which use different versions
+        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
+        # sys.modules by the time B's setup.py is executed, causing B to run
+        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
+        # sandbox that restores sys.modules to its pre-build state, so the
+        # parent is protected against the child's "import versioneer". By
+        # removing ourselves from sys.modules here, before the child build
+        # happens, we protect the child from the parent's versioneer too.
+        # Also see https://github.com/warner/python-versioneer/issues/52
+
+    cmds = {}
+
+    # we add "version" to both distutils and setuptools
+    from distutils.core import Command
+
+    class cmd_version(Command):
+        description = "report generated version string"
+        user_options = []
+        boolean_options = []
+
+        def initialize_options(self):
+            pass
+
+        def finalize_options(self):
+            pass
+
+        def run(self):
+            vers = get_versions(verbose=True)
+            print("Version: %s" % vers["version"])
+            print(" full-revisionid: %s" % vers.get("full-revisionid"))
+            print(" dirty: %s" % vers.get("dirty"))
+            print(" date: %s" % vers.get("date"))
+            if vers["error"]:
+                print(" error: %s" % vers["error"])
+    cmds["version"] = cmd_version
+
+    # we override "build_py" in both distutils and setuptools
+    #
+    # most invocation pathways end up running build_py:
+    #  distutils/build -> build_py
+    #  distutils/install -> distutils/build ->..
+    #  setuptools/bdist_wheel -> distutils/install ->..
+    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
+    #  setuptools/install -> bdist_egg ->..
+    #  setuptools/develop -> ?
+ # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) From 35c9050b46707f342ef1136bcb97a7db36b639ec Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 25 May 2019 21:52:58 -0400 Subject: [PATCH 206/689] MAINT: Drop COMMIT_INFO --- .gitattributes | 1 - MANIFEST.in | 1 - nibabel/COMMIT_INFO.txt | 6 ---- nibabel/pkg_info.py | 65 +++++++++++++---------------------------- 4 files changed, 20 insertions(+), 53 deletions(-) delete mode 100644 nibabel/COMMIT_INFO.txt diff --git a/.gitattributes b/.gitattributes index 54f60617d5..9f3e8c9167 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1 @@ -nibabel/COMMIT_INFO.txt export-subst nibabel/_version.py export-subst diff --git a/MANIFEST.in b/MANIFEST.in index 526e478f39..a901441ed6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,6 +9,5 @@ recursive-include nibabel/tests/data * recursive-include nibabel/externals/tests/data * recursive-include nibabel/nicom/tests/data * recursive-include nibabel/gifti/tests/data * -include nibabel/COMMIT_INFO.txt include versioneer.py include nibabel/_version.py diff --git a/nibabel/COMMIT_INFO.txt b/nibabel/COMMIT_INFO.txt deleted file mode 100644 index dcaee0b8ed..0000000000 --- a/nibabel/COMMIT_INFO.txt +++ /dev/null @@ -1,6 +0,0 @@ -# This is an ini file that may contain information about the code state -[commit hash] -# The line below may contain a valid hash if it has been substituted during 'git archive' -archive_subst_hash=$Format:%h$ -# This line may be modified by the install process -install_hash= diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index bc58c3bdc9..b606fed0f3 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -1,31 +1,16 @@ -import os import sys -import subprocess -try: - from ConfigParser import RawConfigParser as ConfigParser -except ImportError: - from configparser import RawConfigParser as ConfigParser # python 3 +from . import _version -COMMIT_INFO_FNAME = 'COMMIT_INFO.txt' +def pkg_commit_hash(pkg_path=None): + ''' Get short form of commit hash -def pkg_commit_hash(pkg_path): - ''' Get short form of commit hash given directory `pkg_path` + Versioneer placed a ``_version.py`` file in the package directory. 
This file + gets updated on installation or ``git archive``. + We inspect the contents of ``_version`` to detect whether we are in a + repository, an archive of the repository, or an installed package. - There should be a file called 'COMMIT_INFO.txt' in `pkg_path`. This is a - file in INI file format, with at least one section: ``commit hash``, and - two variables ``archive_subst_hash`` and ``install_hash``. The first has a - substitution pattern in it which may have been filled by the execution of - ``git archive`` if this is an archive generated that way. The second is - filled in by the installation, if the installation is from a git archive. - - We get the commit hash from (in order of preference): - - * A substituted value in ``archive_subst_hash`` - * A written commit hash value in ``install_hash` - * git's output, if we are in a git repository - - If all these fail, we return a not-found placeholder tuple + If detection fails, we return a not-found placeholder tuple Parameters ---------- @@ -39,27 +24,17 @@ def pkg_commit_hash(pkg_path): hash_str : str short form of hash ''' - # Try and get commit from written commit text file - pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) - if not os.path.isfile(pth): - raise IOError('Missing commit info file %s' % pth) - cfg_parser = ConfigParser() - cfg_parser.read(pth) - archive_subst = cfg_parser.get('commit hash', 'archive_subst_hash') - if not archive_subst.startswith('$Format'): # it has been substituted - return 'archive substitution', archive_subst - install_subst = cfg_parser.get('commit hash', 'install_hash') - if install_subst != '': - return 'installation', install_subst - # maybe we are in a repository - proc = subprocess.Popen('git rev-parse --short HEAD', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=pkg_path, shell=True) - repo_commit, _ = proc.communicate() - if repo_commit: - return 'repository', repo_commit.strip() - return '(none found)', '' + versions = _version.get_versions() + hash_str = versions['full-revisionid'][:7] + if hasattr(_version, 'version_json'): + hash_from = 'installation' + elif not _version.get_keywords()['full'].startswith('$Format:'): + hash_from = 'archive substitution' + elif versions['version'] == '0+unknown': + hash_from, hash_str = '(none found)', '' + else: + hash_from = 'repository' + return hash_from, hash_str def get_pkg_info(pkg_path): @@ -75,7 +50,7 @@ def get_pkg_info(pkg_path): context : dict with named parameters of interest ''' - src, hsh = pkg_commit_hash(pkg_path) + src, hsh = pkg_commit_hash() import numpy return dict( pkg_path=pkg_path, From 4dfc7c42d55dad774269a9ef95a2414008d93e35 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 26 May 2019 09:42:22 -0400 Subject: [PATCH 207/689] RF: Drop old __version__ --- nibabel/__init__.py | 6 +----- nibabel/info.py | 24 ++++-------------------- 2 files changed, 5 insertions(+), 25 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index a599b2e31f..20fdad3469 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -9,11 +9,7 @@ import os -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions - -from .info import long_description as __doc__ +from .info import __version__, long_description as __doc__ __doc__ += """ Quickstart ========== diff --git a/nibabel/info.py b/nibabel/info.py index 39b77421db..d9fb12e09b 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -1,4 +1,4 @@ -""" Define distrubution parameters for nibabel, including package version +""" Define distribution parameters for nibabel, including package version This file contains defines parameters for nibabel that we use to fill settings in setup.py, the nibabel top-level docstring, and for building the docs. In @@ -8,25 +8,9 @@ import re from distutils.version import StrictVersion -# nibabel version information. An empty _version_extra corresponds to a -# full release. *Any string* in `_version_extra` labels the version as -# pre-release. So, if `_version_extra` is not empty, the version is taken to -# be earlier than the same version where `_version_extra` is empty (see -# `cmp_pkg_version` below). -# -# We usually use `dev` as `_version_extra` to label this as a development -# (pre-release) version. -_version_major = 3 -_version_minor = 0 -_version_micro = 0 -_version_extra = 'dev' -# _version_extra = '' - -# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" -__version__ = "%s.%s.%s%s" % (_version_major, - _version_minor, - _version_micro, - _version_extra) +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions def _parse_version(version_str): From 572fa1d0c6b86187fe0a385b897b66ec759fbab4 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 5 Aug 2019 17:34:51 -0700 Subject: [PATCH 208/689] RF: Move all version play from info to pkg_info --- nibabel/__init__.py | 3 +- nibabel/deprecated.py | 2 +- nibabel/info.py | 74 ++------------------------- nibabel/pkg_info.py | 59 +++++++++++++++++++++ nibabel/tests/test_deprecated.py | 10 ++-- nibabel/tests/test_info.py | 47 ----------------- nibabel/tests/test_pkg_info.py | 44 ++++++++++++++++ nibabel/tests/test_removalschedule.py | 2 +- 8 files changed, 117 insertions(+), 124 deletions(-) delete mode 100644 nibabel/tests/test_info.py diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 20fdad3469..2e4f877c5f 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -9,7 +9,8 @@ import os -from .info import __version__, long_description as __doc__ +from .pkg_info import __version__ +from .info import long_description as __doc__ __doc__ += """ Quickstart ========== diff --git a/nibabel/deprecated.py b/nibabel/deprecated.py index c8abee91a0..715ee7f60d 100644 --- a/nibabel/deprecated.py +++ b/nibabel/deprecated.py @@ -4,7 +4,7 @@ import warnings from .deprecator import Deprecator -from .info import cmp_pkg_version +from .pkg_info import cmp_pkg_version class ModuleProxy(object): diff --git a/nibabel/info.py b/nibabel/info.py index d9fb12e09b..cec222346f 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -1,73 +1,11 @@ -""" Define distribution parameters for nibabel, including package version +""" Define long_description parameter -This file contains defines parameters for nibabel that we use to fill settings -in setup.py, the nibabel top-level docstring, and for building the docs. In -setup.py in particular, we exec this file, so it cannot import nibabel. +This parameter is used to fill settings in setup.py, the nibabel top-level +docstring, and in building the docs. +We exec this file in several places, so it cannot import nibabel or use +relative imports. """ -import re -from distutils.version import StrictVersion - -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions - - -def _parse_version(version_str): - """ Parse version string `version_str` in our format - """ - match = re.match(r'([0-9.]*\d)(.*)', version_str) - if match is None: - raise ValueError('Invalid version ' + version_str) - return match.groups() - - -def _cmp(a, b): - """ Implementation of ``cmp`` for Python 3 - """ - return (a > b) - (a < b) - - -def cmp_pkg_version(version_str, pkg_version_str=__version__): - """ Compare `version_str` to current package version - - To be valid, a version must have a numerical major version followed by a - dot, followed by a numerical minor version. It may optionally be followed - by a dot and a numerical micro version, and / or by an "extra" string. - *Any* extra string labels the version as pre-release, so `1.2.0somestring` - compares as prior to (pre-release for) `1.2.0`, where `somestring` can be - any string. - - Parameters - ---------- - version_str : str - Version string to compare to current package version - pkg_version_str : str, optional - Version of our package. Optional, set fom ``__version__`` by default. - - Returns - ------- - version_cmp : int - 1 if `version_str` is a later version than `pkg_version_str`, 0 if - same, -1 if earlier. 
-
-    Examples
-    --------
-    >>> cmp_pkg_version('1.2.1', '1.2.0')
-    1
-    >>> cmp_pkg_version('1.2.0dev', '1.2.0')
-    -1
-    """
-    version, extra = _parse_version(version_str)
-    pkg_version, pkg_extra = _parse_version(pkg_version_str)
-    if version != pkg_version:
-        return _cmp(StrictVersion(version), StrictVersion(pkg_version))
-    return (0 if extra == pkg_extra
-            else 1 if extra == ''
-            else -1 if pkg_extra == ''
-            else _cmp(extra, pkg_extra))
-
-
 # Note: this long_description is the canonical place to edit this text.
 # It also appears in README.rst, but it should get there by running
 # ``tools/refresh_readme.py`` which pulls in this version.
@@ -157,5 +95,3 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__):
 .. _zenodo: https://zenodo.org
 .. _Digital Object Identifier: https://en.wikipedia.org/wiki/Digital_object_identifier
 """
-
-VERSION = __version__
diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py
index b606fed0f3..9b88bc764b 100644
--- a/nibabel/pkg_info.py
+++ b/nibabel/pkg_info.py
@@ -1,6 +1,65 @@
 import sys
+import re
+from distutils.version import StrictVersion
 
 from . import _version
 
+__version__ = _version.get_versions()['version']
+
+
+def _parse_version(version_str):
+    """ Parse version string `version_str` in our format
+    """
+    match = re.match(r'([0-9.]*\d)(.*)', version_str)
+    if match is None:
+        raise ValueError('Invalid version ' + version_str)
+    return match.groups()
+
+
+def _cmp(a, b):
+    """ Implementation of ``cmp`` for Python 3
+    """
+    return (a > b) - (a < b)
+
+
+def cmp_pkg_version(version_str, pkg_version_str=__version__):
+    """ Compare `version_str` to current package version
+
+    To be valid, a version must have a numerical major version followed by a
+    dot, followed by a numerical minor version. It may optionally be followed
+    by a dot and a numerical micro version, and / or by an "extra" string.
+    *Any* extra string labels the version as pre-release, so `1.2.0somestring`
+    compares as prior to (pre-release for) `1.2.0`, where `somestring` can be
+    any string.
+
+    Parameters
+    ----------
+    version_str : str
+        Version string to compare to current package version
+    pkg_version_str : str, optional
+        Version of our package. Optional, set from ``__version__`` by default.
+
+    Returns
+    -------
+    version_cmp : int
+        1 if `version_str` is a later version than `pkg_version_str`, 0 if
+        same, -1 if earlier.
+ + Examples + -------- + >>> cmp_pkg_version('1.2.1', '1.2.0') + 1 + >>> cmp_pkg_version('1.2.0dev', '1.2.0') + -1 + """ + version, extra = _parse_version(version_str) + pkg_version, pkg_extra = _parse_version(pkg_version_str) + if version != pkg_version: + return _cmp(StrictVersion(version), StrictVersion(pkg_version)) + return (0 if extra == pkg_extra + else 1 if extra == '' + else -1 if pkg_extra == '' + else _cmp(extra, pkg_extra)) + def pkg_commit_hash(pkg_path=None): ''' Get short form of commit hash diff --git a/nibabel/tests/test_deprecated.py b/nibabel/tests/test_deprecated.py index e2e7f099ac..2964707717 100644 --- a/nibabel/tests/test_deprecated.py +++ b/nibabel/tests/test_deprecated.py @@ -3,7 +3,7 @@ import warnings -from nibabel import info +from nibabel import pkg_info from nibabel.deprecated import (ModuleProxy, FutureWarningMixin, deprecate_with_version) @@ -14,12 +14,12 @@ def setup(): # Hack nibabel version string - info.cmp_pkg_version.__defaults__ = ('2.0',) + pkg_info.cmp_pkg_version.__defaults__ = ('2.0',) def teardown(): # Hack nibabel version string back again - info.cmp_pkg_version.__defaults__ = (info.__version__,) + pkg_info.cmp_pkg_version.__defaults__ = (pkg_info.__version__,) def test_module_proxy(): @@ -76,8 +76,8 @@ def func(): return 99 try: - info.cmp_pkg_version.__defaults__ = ('2.0dev',) + pkg_info.cmp_pkg_version.__defaults__ = ('2.0dev',) # No error, even though version is dev version of current assert_equal(func(), 99) finally: - info.cmp_pkg_version.__defaults__ = ('2.0',) + pkg_info.cmp_pkg_version.__defaults__ = ('2.0',) diff --git a/nibabel/tests/test_info.py b/nibabel/tests/test_info.py deleted file mode 100644 index d16c7f76b8..0000000000 --- a/nibabel/tests/test_info.py +++ /dev/null @@ -1,47 +0,0 @@ -""" Testing info module -""" - -import nibabel as nib -from nibabel import info -from nibabel.info import cmp_pkg_version - -from nose.tools import (assert_raises, assert_equal) - - -def test_version(): - # Test info version is the same as our own version - assert_equal(info.__version__, nib.__version__) - - -def test_cmp_pkg_version(): - # Test version comparator - assert_equal(cmp_pkg_version(info.__version__), 0) - assert_equal(cmp_pkg_version('0.0'), -1) - assert_equal(cmp_pkg_version('1000.1000.1'), 1) - assert_equal(cmp_pkg_version(info.__version__, info.__version__), 0) - for test_ver, pkg_ver, exp_out in (('1.0', '1.0', 0), - ('1.0.0', '1.0', 0), - ('1.0', '1.0.0', 0), - ('1.1', '1.1', 0), - ('1.2', '1.1', 1), - ('1.1', '1.2', -1), - ('1.1.1', '1.1.1', 0), - ('1.1.2', '1.1.1', 1), - ('1.1.1', '1.1.2', -1), - ('1.1', '1.1dev', 1), - ('1.1dev', '1.1', -1), - ('1.2.1', '1.2.1rc1', 1), - ('1.2.1rc1', '1.2.1', -1), - ('1.2.1rc1', '1.2.1rc', 1), - ('1.2.1rc', '1.2.1rc1', -1), - ('1.2.1rc1', '1.2.1rc', 1), - ('1.2.1rc', '1.2.1rc1', -1), - ('1.2.1b', '1.2.1a', 1), - ('1.2.1a', '1.2.1b', -1), - ): - assert_equal(cmp_pkg_version(test_ver, pkg_ver), exp_out) - assert_raises(ValueError, cmp_pkg_version, 'foo.2') - assert_raises(ValueError, cmp_pkg_version, 'foo.2', '1.0') - assert_raises(ValueError, cmp_pkg_version, '1.0', 'foo.2') - assert_raises(ValueError, cmp_pkg_version, '1') - assert_raises(ValueError, cmp_pkg_version, 'foo') diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index e931493fa5..aef7913e43 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -2,6 +2,10 @@ """ import nibabel as nib +from nibabel.pkg_info import cmp_pkg_version + +from nose.tools import (assert_raises, 
assert_equal) + def test_pkg_info(): """Simple smoke test @@ -12,3 +16,43 @@ def test_pkg_info(): - nibabel.pkg_info.pkg_commit_hash """ info = nib.get_info() + + +def test_version(): + # Test info version is the same as our own version + assert_equal(nib.pkg_info.__version__, nib.__version__) + + +def test_cmp_pkg_version(): + # Test version comparator + assert_equal(cmp_pkg_version(nib.__version__), 0) + assert_equal(cmp_pkg_version('0.0'), -1) + assert_equal(cmp_pkg_version('1000.1000.1'), 1) + assert_equal(cmp_pkg_version(nib.__version__, nib.__version__), 0) + for test_ver, pkg_ver, exp_out in (('1.0', '1.0', 0), + ('1.0.0', '1.0', 0), + ('1.0', '1.0.0', 0), + ('1.1', '1.1', 0), + ('1.2', '1.1', 1), + ('1.1', '1.2', -1), + ('1.1.1', '1.1.1', 0), + ('1.1.2', '1.1.1', 1), + ('1.1.1', '1.1.2', -1), + ('1.1', '1.1dev', 1), + ('1.1dev', '1.1', -1), + ('1.2.1', '1.2.1rc1', 1), + ('1.2.1rc1', '1.2.1', -1), + ('1.2.1rc1', '1.2.1rc', 1), + ('1.2.1rc', '1.2.1rc1', -1), + ('1.2.1rc1', '1.2.1rc', 1), + ('1.2.1rc', '1.2.1rc1', -1), + ('1.2.1b', '1.2.1a', 1), + ('1.2.1a', '1.2.1b', -1), + ): + assert_equal(cmp_pkg_version(test_ver, pkg_ver), exp_out) + assert_raises(ValueError, cmp_pkg_version, 'foo.2') + assert_raises(ValueError, cmp_pkg_version, 'foo.2', '1.0') + assert_raises(ValueError, cmp_pkg_version, '1.0', 'foo.2') + assert_raises(ValueError, cmp_pkg_version, '1') + assert_raises(ValueError, cmp_pkg_version, 'foo') + diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 24f9bdd12c..3e905a6446 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,4 +1,4 @@ -from ..info import cmp_pkg_version +from ..pkg_info import cmp_pkg_version from ..testing import assert_raises, assert_false MODULE_SCHEDULE = [ From 389059e5421900c2d58be0b3c3c7496aecd7f723 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 15 Aug 2019 03:38:02 -0400 Subject: [PATCH 209/689] MAINT: Add fallback version to versioneer + git-archive --- nibabel/_version.py | 12 +++++++++--- nibabel/info.py | 23 ++++++++++++++++++++--- versioneer.py | 24 ++++++++++++++++++------ 3 files changed, 47 insertions(+), 12 deletions(-) diff --git a/nibabel/_version.py b/nibabel/_version.py index 674e0d4e41..cbe7d6a210 100644 --- a/nibabel/_version.py +++ b/nibabel/_version.py @@ -15,6 +15,7 @@ import re import subprocess import sys +import runpy def get_keywords(): @@ -155,6 +156,11 @@ def git_get_keywords(versionfile_abs): f.close() except EnvironmentError: pass + try: + rel = runpy.run_path(os.path.join(os.path.dirname(versionfile_abs), "info.py")) + keywords["fallback"] = rel["VERSION"] + except (FileNotFoundError, KeyError): + pass return keywords @@ -205,10 +211,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there + # no suitable tags, so inspect ./info.py if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", + print("no suitable tags, falling back to info.VERSION or 0+unknown") + return {"version": keywords.get("fallback", "0+unknown"), "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} diff --git a/nibabel/info.py b/nibabel/info.py index cec222346f..dbd877318c 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -1,11 +1,28 @@ -""" Define long_description parameter +""" Define distribution parameters for nibabel, including package version -This parameter is used to fill settings in setup.py, the nibabel top-level -docstring, and in building the docs. +The long description parameter is used to fill settings in setup.py, the +nibabel top-level docstring, and in building the docs. We exec this file in several places, so it cannot import nibabel or use relative imports. """ +# nibabel version information +# This is a fall-back for versioneer when installing from a git archive. +# This should be set to the intended next version + dev to indicate a +# development (pre-release) version. +_version_major = 3 +_version_minor = 0 +_version_micro = 0 +_version_extra = 'dev' +# _version_extra = '' + +# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" +VERSION = "%s.%s.%s%s" % (_version_major, + _version_minor, + _version_micro, + _version_extra) + + # Note: this long_description is the canonical place to edit this text. # It also appears in README.rst, but it should get there by running # ``tools/refresh_readme.py`` which pulls in this version. 
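Taken together, the info.py hunk above and the versioneer.py/_version.py hunks below implement a fallback: git_get_keywords() reads VERSION out of the info.py sitting next to _version.py, and git_versions_from_keywords() reports it when no suitable tag is found. A minimal, self-contained sketch of that flow follows; the helper name _fallback_version, the sample path, and the sample keywords dict are illustrative only, not part of the patch:

    import os
    import runpy


    def _fallback_version(versionfile_abs):
        # As in the patched git_get_keywords(): execute the info.py that
        # sits beside _version.py and pick up its VERSION constant.
        try:
            rel = runpy.run_path(
                os.path.join(os.path.dirname(versionfile_abs), "info.py"))
            return rel["VERSION"]  # e.g. "3.0.0dev"
        except (FileNotFoundError, KeyError):
            return None


    # Hypothetical keywords dict, shaped like git_get_keywords() output for
    # an expanded git-archive whose refnames contain no version-like tag.
    keywords = {"refnames": "HEAD -> master", "full": "0123abcd" * 5}
    fallback = _fallback_version("/path/to/nibabel/_version.py")
    if fallback is not None:
        keywords["fallback"] = fallback
    # git_versions_from_keywords() then effectively reports:
    print(keywords.get("fallback", "0+unknown"))

Executing info.py with runpy.run_path() rather than importing it fits that file's own constraint (it "cannot import nibabel or use relative imports"), so the fallback also works from an unpacked git archive where the package is not importable.
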
diff --git a/versioneer.py b/versioneer.py index 64fea1c892..7ede43a06a 100644 --- a/versioneer.py +++ b/versioneer.py @@ -287,6 +287,7 @@ import re import subprocess import sys +import runpy class VersioneerConfig: @@ -435,6 +436,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, import re import subprocess import sys +import runpy def get_keywords(): @@ -575,6 +577,11 @@ def git_get_keywords(versionfile_abs): f.close() except EnvironmentError: pass + try: + rel = runpy.run_path(os.path.join(os.path.dirname(versionfile_abs), "info.py")) + keywords["fallback"] = rel["VERSION"] + except (FileNotFoundError, KeyError): + pass return keywords @@ -625,10 +632,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there + # no suitable tags, so inspect ./info.py if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", + print("no suitable tags, falling back to info.VERSION or 0+unknown") + return {"version": keywords.get("fallback", "0+unknown"), "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @@ -967,6 +974,11 @@ def git_get_keywords(versionfile_abs): f.close() except EnvironmentError: pass + try: + rel = runpy.run_path(os.path.join(os.path.dirname(versionfile_abs), "info.py")) + keywords["fallback"] = rel["VERSION"] + except (FileNotFoundError, KeyError): + pass return keywords @@ -1017,10 +1029,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there + # no suitable tags, so inspect ./info.py if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", + print("no suitable tags, falling back to info.VERSION or 0+unknown") + return {"version": keywords.get("fallback", "0+unknown"), "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} From 4c2fd468652a85726866efa4f71e0a0d7cd77968 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 15 Aug 2019 05:01:42 -0400 Subject: [PATCH 210/689] TEST: Test fallback version --- nibabel/tests/test_pkg_info.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index aef7913e43..1a5775d33f 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -3,12 +3,13 @@ import nibabel as nib from nibabel.pkg_info import cmp_pkg_version +from ..info import VERSION from nose.tools import (assert_raises, assert_equal) def test_pkg_info(): - """Simple smoke test + """Smoke test nibabel.get_info() Hits: - nibabel.get_info @@ -23,6 +24,20 @@ def test_version(): assert_equal(nib.pkg_info.__version__, nib.__version__) +def test_fallback_version(): + """Test fallback version is up-to-date + + This should only fail if we fail to bump nibabel.info.VERSION immediately + after release + """ + assert ( + # dev version should be larger than tag+commit-githash + cmp_pkg_version(VERSION) >= 0 or + # Allow VERSION bump to lag releases by one commit + VERSION == nib.__version__ + 'dev'), \ + "nibabel.info.VERSION does not match current tag information" + + def test_cmp_pkg_version(): # Test version comparator assert_equal(cmp_pkg_version(nib.__version__), 0) From 3a6bf6c56026a986de6cd2314c06061bf1b99a12 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 15 Aug 2019 10:22:38 -0400 Subject: [PATCH 211/689] VERSIONEER: Improve short version file handling Made and tagged nibabel-specific hacks for including a fallback version for using with git-archive. This is no longer the canonical versioneer but should behave identically except in git archives made on non-release revisions. --- nibabel/_version.py | 7 +++++-- versioneer.py | 14 ++++++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/nibabel/_version.py b/nibabel/_version.py index cbe7d6a210..08332076d9 100644 --- a/nibabel/_version.py +++ b/nibabel/_version.py @@ -156,6 +156,8 @@ def git_get_keywords(versionfile_abs): f.close() except EnvironmentError: pass + # CJM: Nibabel hack to ensure we can git-archive off-release versions and + # revert to old X.Y.Zdev versions + githash try: rel = runpy.run_path(os.path.join(os.path.dirname(versionfile_abs), "info.py")) keywords["fallback"] = rel["VERSION"] @@ -167,8 +169,9 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + # CJM: Nibabel fix to avoid hitting unguarded dictionary lookup, better explanation + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant diff --git a/versioneer.py b/versioneer.py index 7ede43a06a..18682b93bd 100644 --- a/versioneer.py +++ b/versioneer.py @@ -577,6 +577,8 @@ def git_get_keywords(versionfile_abs): f.close() except EnvironmentError: pass + # CJM: Nibabel hack to ensure we can git-archive off-release versions and + # revert to old X.Y.Zdev versions + githash try: rel = runpy.run_path(os.path.join(os.path.dirname(versionfile_abs), "info.py")) keywords["fallback"] = rel["VERSION"] @@ -588,8 +590,9 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get 
version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
+    # CJM: Nibabel fix to avoid hitting unguarded dictionary lookup, better explanation
+    if "refnames" not in keywords:
+        raise NotThisMethod("Short version file found")
     date = keywords.get("date")
     if date is not None:
         # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant

From 30c536f2ed7b2f293c422d03eabd5d4fbe0c8175 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Thu, 15 Aug 2019 10:24:12 -0400
Subject: [PATCH 212/689] MAINT: Ignore _version.py in coverage

---
 .coveragerc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.coveragerc b/.coveragerc
index e2ec8ff3cd..57747ec0d8 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -6,3 +6,4 @@ omit =
   */externals/*
   */benchmarks/*
   */tests/*
+  nibabel/_version.py

From 6feb58126b2bdc33a50e7c9ef80f224f3edac90f Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Sun, 18 Aug 2019 15:54:58 +0100
Subject: [PATCH 213/689] add gifti image class function agg_data

---
 nibabel/gifti/gifti.py | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 22d6449e9a..7eb458c49a 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -680,6 +680,43 @@ def get_arrays_from_intent(self, intent):
         it = intent_codes.code[intent]
         return [x for x in self.darrays if x.intent == it]
 
+    def agg_data(self, intent_code=None):
+        """
+        Return a numpy array of aggregated GiftiDataArray of the same intent code
+        or
+        Return GiftiDataArray in tuples for surface files
+
+        Parameters
+        ----------
+        intent_code : None, string, integer or tuple of string, optional
+            Intent code, or string describing code.
+            Accept a tuple that contains multiple intents to specify the order.
+
+        Returns
+        -------
+        all_data : tuple of ndarray or ndarray
+            If the input is a tuple, the returned tuple will match the order. 
+        """
+
+        # Allow multiple intents to specify the order
+        # e.g., agg_data(('pointset', 'triangle')) ensures consistent order
+
+        if isinstance(intent_code, tuple):
+            return tuple(self.agg_data(intent_code=code) for code in intent_code)
+
+        darrays = self.darrays if intent_code is None else self.get_arrays_from_intent(intent_code)
+        all_data = tuple(da.data for da in darrays)
+        all_intent = tuple(intent_codes.niistring[da.intent] for da in darrays)
+
+        # Gifti files usually hold one or more data arrays of the same intent code;
+        # surf.gii is a special case, having two data arrays of different intent codes
+
+        if (self.numDA > 1 and all(el == all_intent[0] for el in all_intent)):
+            return np.column_stack(all_data)
+        else:
+            return all_data
+
+
     @deprecate_with_version(
         'getArraysFromIntent method deprecated. '
         "Use get_arrays_from_intent instead.",

From 72471a74ce5961296ad0394460173facfc09cb2e Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Sun, 18 Aug 2019 16:39:02 +0100
Subject: [PATCH 214/689] PEP8

---
 nibabel/gifti/gifti.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 7eb458c49a..b2c433fe8e 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -695,7 +695,7 @@ def agg_data(self, intent_code=None):
         Returns
         -------
         all_data : tuple of ndarray or ndarray
-            If the input is a tuple, the returned tuple will match the order. 
+            If the input is a tuple, the returned tuple will match the order.
         """
 
         # Allow multiple intents to specify the order
@@ -716,7 +716,6 @@ def agg_data(self, intent_code=None):
             return all_data
 
-
     @deprecate_with_version(

From 1f124ea6cda17f8ef15cacaf5e5883d6cb6f4f52 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 19 Aug 2019 09:19:28 -0400
Subject: [PATCH 215/689] MNT: Deprecate DataobjImage.get_data, schedule for
 removal in 5.0

---
 nibabel/dataobj_images.py             |  5 +++++
 nibabel/tests/test_removalschedule.py | 22 ++++++++++++++++++++++
 2 files changed, 27 insertions(+)

diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py
index d7d082403d..9ba97789dc 100644
--- a/nibabel/dataobj_images.py
+++ b/nibabel/dataobj_images.py
@@ -56,6 +56,11 @@ def dataobj(self):
     def _data(self):
         return self._dataobj
 
+    @deprecate_with_version('get_data() is deprecated in favor of get_fdata(),'
+                            ' which has a more predictable return type. 
To ' + 'obtain get_data() behavior going forward, use ' + 'numpy.asanyarray(img.dataobj).', + '3.0', '5.0') def get_data(self, caching='fill'): """ Return image data from image with any necessary scaling applied diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 3e905a6446..ce1ba668b2 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -14,6 +14,12 @@ ('1.0.0', [('nibabel', 'neverexisted')]), ] +ATTRIBUTE_SCHEDULE = [ + ('5.0.0', [('nibabel.dataobj_images', 'DataobjImage', 'get_data')]), + # Verify that the test will be quiet if the schedule outlives the modules + ('1.0.0', [('nibabel', 'Nifti1Image', 'neverexisted')]), + ] + def test_module_removal(): for version, to_remove in MODULE_SCHEDULE: @@ -32,3 +38,19 @@ def test_object_removal(): except ImportError: continue assert_false(hasattr(module, obj), msg="Time to remove %s.%s" % (module_name, obj)) + + +def test_attribute_removal(): + for version, to_remove in ATTRIBUTE_SCHEDULE: + if cmp_pkg_version(version) < 1: + for module_name, cls, attr in to_remove: + try: + module = __import__(module_name) + except ImportError: + continue + try: + klass = getattr(module, cls) + except AttributeError: + continue + assert_false(hasattr(klass, attr), + msg="Time to remove %s.%s.%s" % (module_name, cls, attr)) From 89158074ad0507a1c122a948c836f806a77057e9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 19 Aug 2019 10:46:52 -0400 Subject: [PATCH 216/689] CI: Drop separate indexed_gzip test --- .travis.yml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1ba3fd50a7..e10d99db31 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,8 +13,7 @@ cache: - $HOME/.cache/pip env: global: - - DEPENDS="six numpy scipy matplotlib h5py pillow pydicom" - - OPTIONAL_DEPENDS="" + - DEPENDS="six numpy scipy matplotlib h5py pillow pydicom indexed_gzip" - INSTALL_TYPE="setup" - CHECK_TYPE="test" - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" @@ -74,10 +73,7 @@ matrix: - python: 3.5 env: - CHECK_TYPE="doc_doctests" - # Run tests with indexed_gzip present - - python: 3.5 - env: - - OPTIONAL_DEPENDS="indexed_gzip" + before_install: - travis_retry python -m pip install --upgrade pip - travis_retry pip install --upgrade virtualenv @@ -92,7 +88,8 @@ before_install: - if [ "${CHECK_TYPE}" == "style" ]; then travis_retry pip install flake8; fi - - travis_retry pip install $EXTRA_PIP_FLAGS $DEPENDS $OPTIONAL_DEPENDS + - travis_retry pip install $EXTRA_PIP_FLAGS $DEPENDS + # command to install dependencies install: - | From d4d917a1c0e7b5b840e55ac85e8717d0ade7b6ed Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 19 Aug 2019 12:00:37 -0400 Subject: [PATCH 217/689] STY: Configure pep8speaks --- .pep8speaks.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .pep8speaks.yml diff --git a/.pep8speaks.yml b/.pep8speaks.yml new file mode 100644 index 0000000000..0a0d8c619f --- /dev/null +++ b/.pep8speaks.yml @@ -0,0 +1,12 @@ +scanner: + diff_only: True # Only show errors caused by the patch + linter: flake8 + +message: # Customize the comment made by the bot + opened: # Messages when a new PR is submitted + header: "Hello @{name}, thank you for submitting the Pull Request!" + footer: "To test for issues locally, `pip install flake8` and then run `flake8 nibabel`." 
+    updated:  # Messages when new commits are added to the PR
+        header: "Hello @{name}, Thank you for updating!"
+        footer: "To test for issues locally, `pip install flake8` and then run `flake8 nibabel`."
+    no_errors: "Cheers! There are no style issues detected in this Pull Request. :beers: "

From 4fb633f9b2829c9139311748a80c7abda013ed9c Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Wed, 21 Aug 2019 09:56:54 -0400
Subject: [PATCH 218/689] DATA: Add nibabel-data/nitest-dicom submodule

---
 .gitmodules               | 3 +++
 nibabel-data/nitest-dicom | 1 +
 2 files changed, 4 insertions(+)
 create mode 160000 nibabel-data/nitest-dicom

diff --git a/.gitmodules b/.gitmodules
index db0afa268e..a0dc77c8ec 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -16,3 +16,6 @@
 [submodule "nibabel-data/nitest-cifti2"]
 	path = nibabel-data/nitest-cifti2
 	url = https://github.com/demianw/nibabel-nitest-cifti2.git
+[submodule "nibabel-data/nitest-dicom"]
+	path = nibabel-data/nitest-dicom
+	url = https://github.com/effigies/nitest-dicom
diff --git a/nibabel-data/nitest-dicom b/nibabel-data/nitest-dicom
new file mode 160000
index 0000000000..ff6844f3a5
--- /dev/null
+++ b/nibabel-data/nitest-dicom
@@ -0,0 +1 @@
+Subproject commit ff6844f3a5ef79974c5809a79314c98fd81693cf

From 2fa98728d634c7d5781f8d1cd11520570c993482 Mon Sep 17 00:00:00 2001
From: mathiasg
Date: Wed, 20 Feb 2019 16:13:23 -0500
Subject: [PATCH 219/689] enh+wip: support for philips dcm w/ derived volume

* if DICOM has MR Diffusion Sequence (0018, 9117), discard any derived frames
* output the correct final shape
* TODO: fix get_data() repr

---
 nibabel/nicom/dicomwrappers.py            | 21 ++++++++++++++++-----
 nibabel/nicom/tests/test_dicomwrappers.py |  8 ++++++++
 nibabel/pydicom_compat.py                 |  1 +
 3 files changed, 25 insertions(+), 5 deletions(-)

diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py
index 194227c6cf..09dbbfdb03 100755
--- a/nibabel/nicom/dicomwrappers.py
+++ b/nibabel/nicom/dicomwrappers.py
@@ -21,7 +21,7 @@
 from .dwiparams import B2q, nearest_pos_semi_def, q2bg
 from ..openers import ImageOpener
 from ..onetime import setattr_on_read as one_time
-from ..pydicom_compat import tag_for_keyword
+from ..pydicom_compat import tag_for_keyword, Sequence
 
 
 class WrapperError(Exception):
@@ -461,10 +461,19 @@ def __init__(self, dcm_data):
         Wrapper.__init__(self, dcm_data)
         self.dcm_data = dcm_data
         self.frames = dcm_data.get('PerFrameFunctionalGroupsSequence')
+        self._nframes = self.get('NumberOfFrames')
         try:
             self.frames[0]
         except TypeError:
             raise WrapperError("PerFrameFunctionalGroupsSequence is empty.")
+        # DWI image where derived isotropic, ADC or trace volume was appended to the series
+        if self.frames[0].get([0x18, 0x9117]):
+            self.frames = Sequence(
+                frame for frame in self.frames if
+                frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value
+                != 'ISOTROPIC'
+            )
+            self._nframes = len(self.frames)
         try:
             self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0]
         except TypeError:
@@ -503,8 +512,7 @@ def image_shape(self):
         if None in (rows, cols):
             raise WrapperError("Rows and/or Columns are empty.")
         # Check number of frames
-        n_frames = self.get('NumberOfFrames')
-        assert len(self.frames) == n_frames
+        assert len(self.frames) == self._nframes
         frame_indices = np.array(
             [frame.FrameContentSequence[0].DimensionIndexValues
              for frame in self.frames])
@@ -528,12 +536,15 @@ def image_shape(self):
         # Store frame indices
         self._frame_indices = frame_indices
         if n_dim < 4:  # 3D volume
-            return rows, cols, n_frames
+            return rows, cols, self._nframes
         # More 
than 3 dimensions ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] + if len(ns_unique) == 3: + # derived volume is included + ns_unique.pop(1) shape = (rows, cols) + tuple(ns_unique) n_vols = np.prod(shape[3:]) - if n_frames != n_vols * shape[2]: + if self._nframes != n_vols * shape[2]: raise WrapperError("Calculated shape does not match number of " "frames.") return tuple(shape) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index bea88936d3..abc21f52e5 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -36,6 +36,7 @@ DATA_FILE_DEC_RSCL = pjoin(IO_DATA_PATH, 'decimal_rescale.dcm') DATA_FILE_4D = pjoin(IO_DATA_PATH, '4d_multiframe_test.dcm') DATA_FILE_EMPTY_ST = pjoin(IO_DATA_PATH, 'slicethickness_empty_string.dcm') +DATA_FILE_4D_DERIVED = pjoin(IO_DATA_PATH, '4d_multiframe_with_derived.dcm') # This affine from our converted image was shown to match our image spatially # with an image from SPM DICOM conversion. We checked the matching with SPM @@ -622,6 +623,13 @@ def test_slicethickness_fallback(self): dw = didw.wrapper_from_file(DATA_FILE_EMPTY_ST) assert_equal(dw.voxel_sizes[2], 1.0) + @dicom_test + def test_data_derived_shape(self): + # Test 4D diffusion data with an additional trace volume included + # Excludes the trace volume and generates the correct shape + dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) + assert_equal(dw.image_shape, (96, 96, 60, 33)) + @dicom_test def test_data_fake(self): # Test algorithm for get_data diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index 7a8658cf47..590d8edf94 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -40,6 +40,7 @@ read_file = pydicom.read_file if have_dicom: + from pydicom.sequence import Sequence try: # Versions >= 1.0 tag_for_keyword = pydicom.datadict.tag_for_keyword From 7550262405a8715e827e6c93ce74299dc24bc1ea Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 20 Feb 2019 16:47:36 -0500 Subject: [PATCH 220/689] fix: (py)dicom import --- nibabel/pydicom_compat.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index 590d8edf94..4ecc1c121b 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -27,6 +27,7 @@ import dicom as pydicom # Values not imported by default import dicom.values + from dicom.sequence import Sequence except ImportError: try: import pydicom @@ -34,13 +35,13 @@ have_dicom = False else: # pydicom module available from pydicom.dicomio import read_file + from pydicom.sequence import Sequence # Values not imported by default import pydicom.values else: # dicom module available read_file = pydicom.read_file if have_dicom: - from pydicom.sequence import Sequence try: # Versions >= 1.0 tag_for_keyword = pydicom.datadict.tag_for_keyword From 3046347ae3cc58949eff88ecc23f5e57c496d432 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 20 Feb 2019 17:24:56 -0500 Subject: [PATCH 221/689] fix: set missing import to None, ensure first frame exists before fetching --- nibabel/nicom/dicomwrappers.py | 2 +- nibabel/pydicom_compat.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 09dbbfdb03..820620196a 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -467,7 +467,7 @@ def __init__(self, dcm_data): except TypeError: raise 
WrapperError("PerFrameFunctionalGroupsSequence is empty.") # DWI image where derived isotropic, ADC or trace volume was appended to the series - if self.frames[0].get([0x18, 0x9117]): + if self.frames[0] and self.frames[0].get([0x18, 0x9117], None): self.frames = Sequence( frame for frame in self.frames if frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index 4ecc1c121b..e0c390692e 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -21,7 +21,7 @@ import numpy as np have_dicom = True -pydicom = read_file = tag_for_keyword = None +pydicom = read_file = tag_for_keyword = Sequence = None try: import dicom as pydicom From 5608f5cff534ac6a60872627b83aa42723097bd6 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 20 Feb 2019 17:41:55 -0500 Subject: [PATCH 222/689] fix: skip if frame does not have get attribute --- nibabel/nicom/dicomwrappers.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 820620196a..f7b46c3ec0 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -466,14 +466,18 @@ def __init__(self, dcm_data): self.frames[0] except TypeError: raise WrapperError("PerFrameFunctionalGroupsSequence is empty.") - # DWI image where derived isotropic, ADC or trace volume was appended to the series - if self.frames[0] and self.frames[0].get([0x18, 0x9117], None): - self.frames = Sequence( - frame for frame in self.frames if - frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value - != 'ISOTROPIC' - ) - self._nframes = len(self.frames) + try: + # DWI image where derived isotropic, ADC or trace volume + # was appended to the series + if self.frames[0].get([0x18, 0x9117], None): + self.frames = Sequence( + frame for frame in self.frames if + frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value + != 'ISOTROPIC' + ) + self._nframes = len(self.frames) + except AttributeError: + pass try: self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: From d1dce02f45606907cac9d5e09e81742d5cadd4ca Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Feb 2019 15:10:01 -0500 Subject: [PATCH 223/689] sty: import specifics separately --- nibabel/pydicom_compat.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py index e0c390692e..beb787f315 100644 --- a/nibabel/pydicom_compat.py +++ b/nibabel/pydicom_compat.py @@ -25,9 +25,6 @@ try: import dicom as pydicom - # Values not imported by default - import dicom.values - from dicom.sequence import Sequence except ImportError: try: import pydicom @@ -39,6 +36,9 @@ # Values not imported by default import pydicom.values else: # dicom module available + # Values not imported by default + import dicom.values + from dicom.sequence import Sequence read_file = pydicom.read_file if have_dicom: From ca5cd312a35b3edcd7577f0ea1cc5598d20462a3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Feb 2019 15:11:03 -0500 Subject: [PATCH 224/689] fix: allow non-diffusion 5D --- nibabel/nicom/dicomwrappers.py | 36 ++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index f7b46c3ec0..1cb054e7c7 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -461,23 +461,10 @@ def __init__(self, dcm_data): Wrapper.__init__(self, 
dcm_data) self.dcm_data = dcm_data self.frames = dcm_data.get('PerFrameFunctionalGroupsSequence') - self._nframes = self.get('NumberOfFrames') try: self.frames[0] except TypeError: raise WrapperError("PerFrameFunctionalGroupsSequence is empty.") - try: - # DWI image where derived isotropic, ADC or trace volume - # was appended to the series - if self.frames[0].get([0x18, 0x9117], None): - self.frames = Sequence( - frame for frame in self.frames if - frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value - != 'ISOTROPIC' - ) - self._nframes = len(self.frames) - except AttributeError: - pass try: self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: @@ -515,8 +502,23 @@ def image_shape(self): rows, cols = self.get('Rows'), self.get('Columns') if None in (rows, cols): raise WrapperError("Rows and/or Columns are empty.") + # Check number of frames - assert len(self.frames) == self._nframes + first_frame = self.frames[0] + n_frames = self.get('NumberOfFrames') + # some Philips may have derived images appended + has_derived = False + if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): + # DWI image may include derived isotropic, ADC or trace volume + # check and remove + self.frames = Sequence( + frame for frame in self.frames if + frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value + != 'ISOTROPIC' + ) + n_frames = len(self.frames) + has_derived = True + assert len(self.frames) == n_frames frame_indices = np.array( [frame.FrameContentSequence[0].DimensionIndexValues for frame in self.frames]) @@ -540,15 +542,15 @@ def image_shape(self): # Store frame indices self._frame_indices = frame_indices if n_dim < 4: # 3D volume - return rows, cols, self._nframes + return rows, cols, n_frames # More than 3 dimensions ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - if len(ns_unique) == 3: + if len(ns_unique) == 3 and has_derived: # derived volume is included ns_unique.pop(1) shape = (rows, cols) + tuple(ns_unique) n_vols = np.prod(shape[3:]) - if self._nframes != n_vols * shape[2]: + if n_frames == self.get('NumberOfFrames') and n_frames != n_vols * shape[2]: raise WrapperError("Calculated shape does not match number of " "frames.") return tuple(shape) From b8f32bf12cb03ac48b32c30717c9d15dfa06a358 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 1 Mar 2019 17:27:49 -0500 Subject: [PATCH 225/689] fix: remove unnecessary check --- nibabel/nicom/dicomwrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 1cb054e7c7..edc57b3c1a 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -550,7 +550,7 @@ def image_shape(self): ns_unique.pop(1) shape = (rows, cols) + tuple(ns_unique) n_vols = np.prod(shape[3:]) - if n_frames == self.get('NumberOfFrames') and n_frames != n_vols * shape[2]: + if n_frames != n_vols * shape[2]: raise WrapperError("Calculated shape does not match number of " "frames.") return tuple(shape) From 21af99d05c37a492f46a4df37d114afcecb162c1 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 14 Mar 2019 12:33:50 -0400 Subject: [PATCH 226/689] rf: better error handling, reflect change on frame_indices --- nibabel/nicom/dicomwrappers.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index edc57b3c1a..75a997808f 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ 
-14,6 +14,7 @@ from __future__ import division import operator +import warnings import numpy as np @@ -511,13 +512,20 @@ def image_shape(self): if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): # DWI image may include derived isotropic, ADC or trace volume # check and remove - self.frames = Sequence( - frame for frame in self.frames if - frame.get([0x18, 0x9117])[0].get([0x18, 0x9075]).value - != 'ISOTROPIC' - ) - n_frames = len(self.frames) - has_derived = True + try: + self.frames = Sequence( + frame for frame in self.frames if + frame.MRDiffusionSequence[0].DiffusionDirectionality + != 'ISOTROPIC' + ) + n_frames = len(self.frames) + has_derived = True + except IndexError: + # Sequence tag is found but missing items! + raise WrapperError("Diffusion file missing information") + except AttributeError: + # DiffusionDirectionality tag is not required + pass assert len(self.frames) == n_frames frame_indices = np.array( [frame.FrameContentSequence[0].DimensionIndexValues @@ -536,6 +544,9 @@ def image_shape(self): if stackid_tag in dim_seq: stackid_dim_idx = dim_seq.index(stackid_tag) frame_indices = np.delete(frame_indices, stackid_dim_idx, axis=1) + if has_derived: + # derived volume is included + frame_indices = np.delete(frame_indices, 1, axis=1) # account for the 2 additional dimensions (row and column) not included # in the indices n_dim = frame_indices.shape[1] + 2 @@ -545,9 +556,6 @@ def image_shape(self): return rows, cols, n_frames # More than 3 dimensions ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - if len(ns_unique) == 3 and has_derived: - # derived volume is included - ns_unique.pop(1) shape = (rows, cols) + tuple(ns_unique) n_vols = np.prod(shape[3:]) if n_frames != n_vols * shape[2]: From 1040c519963bd13591dd33ad4ba7dddad8d8aee7 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 14 Mar 2019 12:46:52 -0400 Subject: [PATCH 227/689] add: warn if frames were altered --- nibabel/nicom/dicomwrappers.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 75a997808f..ebc1402d28 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -511,21 +511,23 @@ def image_shape(self): has_derived = False if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): # DWI image may include derived isotropic, ADC or trace volume - # check and remove try: self.frames = Sequence( frame for frame in self.frames if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' ) - n_frames = len(self.frames) - has_derived = True + if n_frames != len(self.frames): + warnings.warn("Derived images found and removed") + n_frames = len(self.frames) + has_derived = True except IndexError: # Sequence tag is found but missing items! 
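+                # (i.e. MRDiffusionSequence is present but empty, so indexing [0] raises IndexError)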
raise WrapperError("Diffusion file missing information") except AttributeError: # DiffusionDirectionality tag is not required pass + assert len(self.frames) == n_frames frame_indices = np.array( [frame.FrameContentSequence[0].DimensionIndexValues From 8e2bd0857a9f1db65bb5d03f0db26325531ef896 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 18 Mar 2019 11:10:48 -0400 Subject: [PATCH 228/689] rf: condense frame checking try block --- nibabel/nicom/dicomwrappers.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index ebc1402d28..115254d45d 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -517,16 +517,17 @@ def image_shape(self): frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' ) - if n_frames != len(self.frames): - warnings.warn("Derived images found and removed") - n_frames = len(self.frames) - has_derived = True except IndexError: # Sequence tag is found but missing items! raise WrapperError("Diffusion file missing information") except AttributeError: # DiffusionDirectionality tag is not required pass + else: + if n_frames != len(self.frames): + warnings.warn("Derived images found and removed") + n_frames = len(self.frames) + has_derived = True assert len(self.frames) == n_frames frame_indices = np.array( From caa6df963e745c48d679d4d7a702420d59906eec Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 18 Mar 2019 11:57:14 -0400 Subject: [PATCH 229/689] fix: ensure derived index is removed --- nibabel/nicom/dicomwrappers.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 115254d45d..14a92e1c5f 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -547,9 +547,15 @@ def image_shape(self): if stackid_tag in dim_seq: stackid_dim_idx = dim_seq.index(stackid_tag) frame_indices = np.delete(frame_indices, stackid_dim_idx, axis=1) + dim_seq.pop(stackid_dim_idx) if has_derived: # derived volume is included - frame_indices = np.delete(frame_indices, 1, axis=1) + derived_tag = tag_for_keyword("DiffusionBValue") + if derived_tag not in dim_seq: + raise WrapperError("Missing information, cannot remove indices " + "with confidence.") + derived_dim_idx = dim_seq.index(derived_tag) + frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) # account for the 2 additional dimensions (row and column) not included # in the indices n_dim = frame_indices.shape[1] + 2 From 265511e58541d58d8319c8ca2de8270e75660021 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 21 Aug 2019 10:19:44 -0400 Subject: [PATCH 230/689] TEST: Update test DICOM file location --- nibabel/nicom/tests/test_dicomwrappers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index abc21f52e5..1ebf464d0e 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -21,6 +21,7 @@ assert_not_equal, assert_raises) from numpy.testing import assert_array_equal, assert_array_almost_equal +from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data IO_DATA_PATH = pjoin(dirname(__file__), 'data') DATA_FILE = pjoin(IO_DATA_PATH, 'siemens_dwi_1000.dcm.gz') @@ -36,7 +37,8 @@ DATA_FILE_DEC_RSCL = pjoin(IO_DATA_PATH, 'decimal_rescale.dcm') DATA_FILE_4D = pjoin(IO_DATA_PATH, '4d_multiframe_test.dcm') DATA_FILE_EMPTY_ST = pjoin(IO_DATA_PATH, 'slicethickness_empty_string.dcm') -DATA_FILE_4D_DERIVED = pjoin(IO_DATA_PATH, '4d_multiframe_with_derived.dcm') +DATA_FILE_4D_DERIVED = pjoin(get_nibabel_data(), 'nitest-dicom', + '4d_multiframe_with_derived.dcm') # This affine from our converted image was shown to match our image spatially # with an image from SPM DICOM conversion. We checked the matching with SPM @@ -624,6 +626,7 @@ def test_slicethickness_fallback(self): assert_equal(dw.voxel_sizes[2], 1.0) @dicom_test + @needs_nibabel_data('nitest-dicom') def test_data_derived_shape(self): # Test 4D diffusion data with an additional trace volume included # Excludes the trace volume and generates the correct shape From 7239a36b053c242de8e367db895a57eba1779842 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 21 Aug 2019 16:40:11 -0400 Subject: [PATCH 231/689] MNT: Update extras --- setup.cfg | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index ebfeb72f39..b2b3ecbcf7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,12 +40,19 @@ include_package_data = True [options.extras_require] dicom = - dicom >=0.9.9 + pydicom >=0.9.9 doc = + matplotlib >= 1.3.1 + mock + numpydoc sphinx >=0.3 + texext +style = + flake8 test = - nose >=0.10.1 + coverage mock + nose >=0.10.1 all = %(dicom)s %(doc)s From d5ce2e719d944fe153641c79eebc6772b1aa28fb Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 21 Aug 2019 16:40:51 -0400 Subject: [PATCH 232/689] CI: Refactor Travis to separate steps more cleanly, use extras to for dependencies --- .travis.yml | 92 +++++++++++++++++++++++++---------------------------- 1 file changed, 43 insertions(+), 49 deletions(-) diff --git a/.travis.yml b/.travis.yml index e10d99db31..4c1e535651 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,32 +13,36 @@ cache: - $HOME/.cache/pip env: global: - - DEPENDS="six numpy scipy matplotlib h5py pillow pydicom indexed_gzip" + - SETUP_REQUIRES="pip setuptools>=30.3.0 wheel" + - DEPENDS="numpy scipy matplotlib h5py pillow pydicom indexed_gzip" - INSTALL_TYPE="setup" - CHECK_TYPE="test" - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" + python: - 3.6 - 3.7 + matrix: include: - # Absolute minimum dependencies + # Basic dependencies only + - python: 3.5 + env: + - DEPENDS="-r requirements.txt" + # Clean install - python: 3.5 env: - - DEPENDS="-r min-requirements.txt setuptools==30.3.0" + - DEPENDS="" + - CHECK_TYPE=skiptests # Absolute minimum dependencies - python: 3.5 env: + - SETUP_REQUIRES="setuptools==30.3.0" - DEPENDS="-r min-requirements.txt" - - CHECK_TYPE="import" # Absolute minimum dependencies plus oldest MPL - # Check these against: - # nibabel/info.py - # doc/source/installation.rst - # requirements.txt - python: 3.5 env: - DEPENDS="-r min-requirements.txt matplotlib==1.3.1" @@ -60,9 +64,6 @@ matrix: - python: 3.5 env: - INSTALL_TYPE=wheel - - python: 3.5 - env: - - INSTALL_TYPE=requirements - python: 3.5 env: - INSTALL_TYPE=archive @@ -72,60 +73,55 @@ matrix: # Documentation doctests - python: 3.5 env: - - CHECK_TYPE="doc_doctests" + - CHECK_TYPE="doc" +# Set up virtual environment, build package, build from depends before_install: - - travis_retry python -m pip install --upgrade pip - - travis_retry pip install --upgrade virtualenv + - travis_retry python -m pip install --upgrade pip virtualenv - virtualenv --python=python venv - source venv/bin/activate - python --version # just to check - - travis_retry pip install -U pip setuptools>=27.0 wheel - - travis_retry pip install coverage - - if [ "${CHECK_TYPE}" == "test" ]; then - travis_retry pip install nose mock; - fi - - if [ "${CHECK_TYPE}" == "style" ]; then - travis_retry pip install flake8; + - travis_retry pip install -U $SETUP_REQUIRES + - | + if [ "$INSTALL_TYPE" == "sdist" ]; then + python setup.py egg_info # check egg_info while we're here + python setup.py sdist + export ARCHIVE=$( ls dist/*.tar.gz ) + elif [ "$INSTALL_TYPE" == "wheel" ]; then + python setup.py bdist_wheel + export ARCHIVE=$( ls dist/*.whl ) + elif [ "$INSTALL_TYPE" == "archive" ]; then + export ARCHIVE="package.tar.gz" + git archive -o $ARCHIVE HEAD fi - - travis_retry pip install $EXTRA_PIP_FLAGS $DEPENDS + - if [ -n "$DEPENDS" ]; then pip install $EXTRA_PIP_FLAGS $DEPENDS; fi # command to install dependencies install: - | if [ "$INSTALL_TYPE" == "setup" ]; then python setup.py install - elif [ "$INSTALL_TYPE" == "sdist" ]; then - python setup_egg.py egg_info # check egg_info while we're here - python setup_egg.py sdist - pip install $EXTRA_PIP_FLAGS dist/*.tar.gz - elif [ "$INSTALL_TYPE" == "wheel" ]; then - python setup_egg.py bdist_wheel - pip install $EXTRA_PIP_FLAGS dist/*.whl - elif [ 
"$INSTALL_TYPE" == "requirements" ]; then - pip install $EXTRA_PIP_FLAGS -r requirements.txt - python setup.py install - elif [ "$INSTALL_TYPE" == "archive" ]; then - git archive -o package.tar.gz HEAD - pip install $EXTRA_PIP_FLAGS package.tar.gz + else + pip install $EXTRA_PIP_FLAGS $ARCHIVE fi + # Basic import check + - python -c 'import nibabel; print(nibabel.__version__)' + - if [ "$CHECK_TYPE" == "skiptests" ]; then exit 0; fi + +before_script: # Point to nibabel data directory - export NIBABEL_DATA_DIR="$PWD/nibabel-data" + # Because nibabel is already installed, will just look up the extra + - pip install $EXTRA_PIP_FLAGS "nibabel[$CHECK_TYPE]" + # command to run tests, e.g. python setup.py test script: - | if [ "${CHECK_TYPE}" == "style" ]; then # Run styles only on core nibabel code. flake8 nibabel - elif [ "${CHECK_TYPE}" == "import" ]; then - # Import nibabel without attempting to test - # Allows us to check missing dependencies masked by testing libraries - printf 'import nibabel\nprint(nibabel.__version__)\n' > import_only.py - cat import_only.py - coverage run import_only.py - elif [ "${CHECK_TYPE}" == "doc_doctests" ]; then + elif [ "${CHECK_TYPE}" == "doc" ]; then cd doc - pip install -r ../doc-requirements.txt make html; make doctest; elif [ "${CHECK_TYPE}" == "test" ]; then @@ -137,12 +133,10 @@ script: else false fi -after_success: - - | - if [ "${CHECK_TYPE}" == "test" ]; then - travis_retry pip install codecov - codecov - fi + +after_script: + - travis_retry pip install codecov + - codecov notifications: webhooks: http://nipy.bic.berkeley.edu:54856/travis From 6515c8ae7093848c23750cc303a8f403f1a10bd6 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 21 Aug 2019 16:41:11 -0400 Subject: [PATCH 233/689] MNT: setup_egg.py now unused --- setup_egg.py | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 setup_egg.py diff --git a/setup_egg.py b/setup_egg.py deleted file mode 100644 index b67a2d9405..0000000000 --- a/setup_egg.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Wrapper to run setup.py using setuptools.""" - -import setuptools # flake8: noqa ; needed to monkeypatch dist_utils - -############################################################################### -# Call the setup.py script, injecting the setuptools-specific arguments. 
-
-if __name__ == '__main__':
-    exec(open('setup.py', 'rt').read(), dict(__name__='__main__'))

From d0e1bbdcf5a35b1f99260c7037ebfcdf229e0584 Mon Sep 17 00:00:00 2001
From: Henry Braun
Date: Thu, 29 Aug 2019 12:41:16 -0500
Subject: [PATCH 234/689] Fix hardcoded maximum number of CSA tags

---
 nibabel/nicom/csareader.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py
index 9847b72d28..f44d0a20c6 100644
--- a/nibabel/nicom/csareader.py
+++ b/nibabel/nicom/csareader.py
@@ -98,9 +98,9 @@ def read(csa_str):
         hdr_type = 1
     csa_dict['type'] = hdr_type
     csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I')
-    if not 0 < csa_dict['n_tags'] <= 128:
+    if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS:
         raise CSAReadError('Number of tags `t` should be '
-                           '0 < t <= 128')
+                           '0 < t <= %s'%MAX_CSA_ITEMS)
     for tag_no in range(csa_dict['n_tags']):
         name, vm, vr, syngodt, n_items, last3 = \
             up_str.unpack('64si4s3i')

From 19aa45927bc44296f8bac0df5369654c6da8edde Mon Sep 17 00:00:00 2001
From: Henry Braun
Date: Thu, 29 Aug 2019 12:51:08 -0500
Subject: [PATCH 235/689] fix some whitespace

---
 nibabel/nicom/csareader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py
index f44d0a20c6..a2d5a76b4d 100644
--- a/nibabel/nicom/csareader.py
+++ b/nibabel/nicom/csareader.py
@@ -100,7 +100,7 @@ def read(csa_str):
     csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I')
     if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS:
         raise CSAReadError('Number of tags `t` should be '
-                           '0 < t <= %s'%MAX_CSA_ITEMS)
+                           '0 < t <= %s' % MAX_CSA_ITEMS)
     for tag_no in range(csa_dict['n_tags']):
         name, vm, vr, syngodt, n_items, last3 = \
             up_str.unpack('64si4s3i')

From 98c68afdda93f6ee7cbc601afab3abebbdfd40b1 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Sat, 31 Aug 2019 10:43:58 +0100
Subject: [PATCH 236/689] limit stacking to timeseries data only

---
 nibabel/gifti/gifti.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index b2c433fe8e..5b341ddfc9 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -706,13 +706,16 @@ def agg_data(self, intent_code=None):
 
         darrays = self.darrays if intent_code is None else self.get_arrays_from_intent(intent_code)
         all_data = tuple(da.data for da in darrays)
-        all_intent = tuple(intent_codes.niistring[da.intent] for da in darrays)
+        all_intent = {da.intent for da in darrays}
 
         # Gifti files allows usually one or more data array of the same intent code
         # surf.gii is a special case of having two data array of different intent code
 
-        if (self.numDA > 1 and all(el == all_intent[0] for el in all_intent)):
-            return np.column_stack(all_data)
+        if self.numDA > 1 and len(all_intent) == 1:
+            if all_intent== 'NIFTI_INTENT_TIME_SERIES': # stack when the gifti is a timeseries
+                return np.column_stack(all_data)
+            else:
+                return all_data
         else:
             return all_data
 

From 4b1b9602129e3e0f9ab6df7a5360feaf219f8ef4 Mon Sep 17 00:00:00 2001
From: Yaroslav Halchenko
Date: Sun, 1 Sep 2019 16:56:13 +0200
Subject: [PATCH 237/689] RF: compare data types using .dtype.type to avoid effect of endianness

The rationale is that in the typical case a user wants to check that a
file contains the correct data; a comparison that also reflects
endianness is mostly of interest to developers. Maybe someone will
later add a flag to nib-diff?
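As a minimal illustration of the distinction (plain numpy; nothing
nibabel-specific is assumed): equality of `dtype` objects is sensitive
to byte order, while equality of their `.type` attributes is not:

    import numpy as np

    le = np.array(1, dtype='<i4')    # little-endian int32
    be = np.array(1, dtype='>i4')    # big-endian int32

    le.dtype == be.dtype             # False: the byte orders differ
    le.dtype.type == be.dtype.type   # True: both are numpy.int32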
---
 nibabel/cmdline/diff.py    | 8 +++++++-
 nibabel/tests/test_diff.py | 6 ++++++
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/nibabel/cmdline/diff.py b/nibabel/cmdline/diff.py
index 4b8b69381c..635b8a50cb 100755
--- a/nibabel/cmdline/diff.py
+++ b/nibabel/cmdline/diff.py
@@ -9,6 +9,11 @@
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
 """ Quick summary of the differences among a set of neuroimaging files
+
+Notes:
+  - differences in data types for header fields will be detected, but
+    endianness differences will not, so that files which store the same
+    data with different native endianness compare equal.
 """
 
 from __future__ import division, print_function, absolute_import
@@ -99,7 +104,8 @@ def are_values_different(*values):
         if type(value0) != type(value):  # if types are different, then we consider them different
             return True
         elif isinstance(value0, np.ndarray):
-            if value0.dtype != value.dtype or \
+            # use .dtype.type to provide endianness agnostic comparison
+            if value0.dtype.type != value.dtype.type or \
                value0.shape != value.shape:
                 return True
             # there might be nans and they need special treatment

diff --git a/nibabel/tests/test_diff.py b/nibabel/tests/test_diff.py
index 4f99ca145f..ad672b7a65 100644
--- a/nibabel/tests/test_diff.py
+++ b/nibabel/tests/test_diff.py
@@ -72,3 +72,9 @@ def test_diff_values_array():
     # and some inf should not be a problem
     assert not are_values_different(array([0, inf]), array([0, inf]))
     assert are_values_different(array([0, inf]), array([inf, 0]))
+
+    # we will allow for types to be of different endianness but the
+    # same in "instantiation" type and value
+    assert not are_values_different(np.array(1, dtype='<i4'),
+                                    np.array(1, dtype='>i4'))

Date: Sun, 1 Sep 2019 11:31:17 -0400
Subject: [PATCH 238/689] Trigger CI

From 250ccf893f448823e4240776e36de376bc864f3c Mon Sep 17 00:00:00 2001
From: Henry Braun
Date: Tue, 3 Sep 2019 09:26:20 -0500
Subject: [PATCH 239/689] Update nibabel/nicom/csareader.py

Co-Authored-By: Chris Markiewicz

---
 nibabel/nicom/csareader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py
index a2d5a76b4d..de2b5dbb1a 100644
--- a/nibabel/nicom/csareader.py
+++ b/nibabel/nicom/csareader.py
@@ -100,7 +100,7 @@ def read(csa_str):
     csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I')
     if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS:
         raise CSAReadError('Number of tags `t` should be '
-                           '0 < t <= %s' % MAX_CSA_ITEMS)
+                           '0 < t <= %d' % MAX_CSA_ITEMS)
     for tag_no in range(csa_dict['n_tags']):
         name, vm, vr, syngodt, n_items, last3 = \
up_str.unpack('64si4s3i') From 52c85270f61804f8ebedf05502ee38beb5b117d0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 9 Sep 2019 10:39:22 -0400 Subject: [PATCH 241/689] CI: Add expected failing test --- appveyor.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 796c74a7da..9ab441f7fa 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -2,6 +2,7 @@ # CI on Windows via appveyor environment: + DEPENDS: numpy scipy matplotlib h5py pydicom matrix: - PYTHON: C:\Python27 @@ -10,6 +11,9 @@ environment: - PYTHON: C:\Python34-x64 - PYTHON: C:\Python35 - PYTHON: C:\Python35-x64 + - PYTHON: C:\Python35-x64 + PYTHONHASHSEED: 283137131 + DEPENDS: "h5py==2.9.0" - PYTHON: C:\Python36 - PYTHON: C:\Python36-x64 - PYTHON: C:\Python37 @@ -28,7 +32,7 @@ install: # Install the dependencies of the project. - pip install --upgrade pip setuptools>=27.0 wheel - - pip install numpy scipy matplotlib h5py pydicom + - pip install %DEPENDS% - pip install nose mock coverage codecov - pip install . - SET NIBABEL_DATA_DIR=%CD%\nibabel-data From 12b2ad1cd7564ff024e66af4f43aa92d245abfe8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 9 Sep 2019 11:07:06 -0400 Subject: [PATCH 242/689] CI: Temporarily drop all other tests --- appveyor.yml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 9ab441f7fa..d946d4194b 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -5,19 +5,9 @@ environment: DEPENDS: numpy scipy matplotlib h5py pydicom matrix: - - PYTHON: C:\Python27 - - PYTHON: C:\Python27-x64 - - PYTHON: C:\Python34 - - PYTHON: C:\Python34-x64 - - PYTHON: C:\Python35 - - PYTHON: C:\Python35-x64 - PYTHON: C:\Python35-x64 PYTHONHASHSEED: 283137131 DEPENDS: "h5py==2.9.0" - - PYTHON: C:\Python36 - - PYTHON: C:\Python36-x64 - - PYTHON: C:\Python37 - - PYTHON: C:\Python37-x64 install: # Prepend newly installed Python to the PATH of this build (this cannot be From 9dec496878544ad5d1d22e2132fe95dc8763909b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 9 Sep 2019 11:07:46 -0400 Subject: [PATCH 243/689] ENH: Require h5py 2.10 for Windows + Python < 3.6 --- nibabel/minc2.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 40f38f97b3..edf553de2e 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -25,11 +25,21 @@ mincstats my_funny.mnc """ +import sys +import os import numpy as np from .keywordonly import kw_only_meth from .optpkg import optional_package -h5py, have_h5py, setup_module = optional_package('h5py') + +# PY35: A bug affected Windows installations of h5py in Python3 versions <3.6 +# due to random dictionary ordering, causing float64 data arrays to sometimes be +# loaded as longdouble (also 64 bit on Windows). This caused stochastic failures +# to correctly handle data caches, and possibly other subtle bugs we never +# caught. This was fixed in h5py 2.10. +# Please see https://github.com/nipy/nibabel/issues/665 for details. +min_h5py = '2.10' if os.name == 'nt' and (3,) <= sys.version_info < (3, 6) else None +h5py, have_h5py, setup_module = optional_package('h5py', min_version=min_h5py) from .minc1 import Minc1File, MincHeader, Minc1Image, MincError From 0a29f4526598ae2a73ae20369c748272ca62cfe4 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 9 Sep 2019 12:15:01 -0400 Subject: [PATCH 244/689] TEST: Import have_h5py and similar from nibabel.minc2 --- nibabel/tests/test_image_api.py | 2 +- nibabel/tests/test_imageclasses.py | 5 +---- nibabel/tests/test_minc2.py | 6 +----- nibabel/tests/test_minc2_data.py | 4 +--- nibabel/tests/test_proxy_api.py | 3 +-- 5 files changed, 5 insertions(+), 15 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ac2a2428c4..169660cc90 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -34,7 +34,7 @@ from ..optpkg import optional_package _, have_scipy, _ = optional_package('scipy') -_, have_h5py, _ = optional_package('h5py') +from ..minc2 import have_h5py from .. import (AnalyzeImage, Spm99AnalyzeImage, Spm2AnalyzeImage, Nifti1Pair, Nifti1Image, Nifti2Pair, Nifti2Image, diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 3c3c437136..5060e23e05 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -6,12 +6,11 @@ import numpy as np -from nibabel.optpkg import optional_package - import nibabel as nib from nibabel.analyze import AnalyzeImage from nibabel.nifti1 import Nifti1Image from nibabel.nifti2 import Nifti2Image +from ..minc2 import have_h5py from nibabel import imageclasses from nibabel.imageclasses import spatial_axes_first, class_map, ext_map @@ -23,8 +22,6 @@ DATA_DIR = pjoin(dirname(__file__), 'data') -have_h5py = optional_package('h5py')[1] - MINC_3DS = ('minc1_1_scale.mnc',) MINC_4DS = ('minc1_4d.mnc',) if have_h5py: diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index c4cb9341ca..23eff702b3 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -12,12 +12,8 @@ import numpy as np -from ..optpkg import optional_package - -h5py, have_h5py, setup_module = optional_package('h5py') - from .. import minc2 -from ..minc2 import Minc2File, Minc2Image +from ..minc2 import Minc2File, Minc2Image, h5py, have_h5py, setup_module from nose.tools import (assert_true, assert_equal, assert_false, assert_raises) diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index 1ec4999a43..8b6fdfb78f 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -15,9 +15,7 @@ import numpy as np -from nibabel.optpkg import optional_package - -h5py, have_h5py, setup_module = optional_package('h5py') +from ..minc2 import h5py, have_h5py, setup_module from .nibabel_data import get_nibabel_data, needs_nibabel_data from .. import load as top_load, Nifti1Image diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 7280c5552d..1e329ae230 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -46,8 +46,7 @@ from .. import minc1 from ..externals.netcdf import netcdf_file from .. import minc2 -from ..optpkg import optional_package -h5py, have_h5py, _ = optional_package('h5py') +from ..minc2 import h5py, have_h5py from .. import ecat from .. import parrec From 622dfa6214043284f135509c97c4afa593c57d1e Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 9 Sep 2019 12:22:40 -0400 Subject: [PATCH 245/689] FIX: More robust floating point dtype check --- nibabel/minc1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/minc1.py b/nibabel/minc1.py index a8535eec05..1ca25eaf9c 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -173,7 +173,7 @@ def _normalize(self, data, sliceobj=()): applied to `data` """ ddt = self.get_data_dtype() - if ddt.type in np.sctypes['float']: + if np.issubdtype(ddt.type, np.floating): return data image_max = self._image_max image_min = self._image_min From 9f940138d541c65d3373165b64a6c63462ad67c9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 9 Sep 2019 14:44:10 -0400 Subject: [PATCH 246/689] Revert "CI: Temporarily drop all other tests" This reverts commit 12b2ad1cd7564ff024e66af4f43aa92d245abfe8. --- appveyor.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/appveyor.yml b/appveyor.yml index d946d4194b..9ab441f7fa 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -5,9 +5,19 @@ environment: DEPENDS: numpy scipy matplotlib h5py pydicom matrix: + - PYTHON: C:\Python27 + - PYTHON: C:\Python27-x64 + - PYTHON: C:\Python34 + - PYTHON: C:\Python34-x64 + - PYTHON: C:\Python35 + - PYTHON: C:\Python35-x64 - PYTHON: C:\Python35-x64 PYTHONHASHSEED: 283137131 DEPENDS: "h5py==2.9.0" + - PYTHON: C:\Python36 + - PYTHON: C:\Python36-x64 + - PYTHON: C:\Python37 + - PYTHON: C:\Python37-x64 install: # Prepend newly installed Python to the PATH of this build (this cannot be From 0032049ebf1421e4f8d8b1e1349dcd70a821cd95 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 9 Sep 2019 16:39:04 -0400 Subject: [PATCH 247/689] CI: Require binaries for 3.4 as dependencies stop building wheels --- appveyor.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/appveyor.yml b/appveyor.yml index 9ab441f7fa..aba8c60380 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -8,7 +8,9 @@ environment: - PYTHON: C:\Python27 - PYTHON: C:\Python27-x64 - PYTHON: C:\Python34 + DEPENDS: --prefer-binary numpy scipy matplotlib h5py pydicom - PYTHON: C:\Python34-x64 + DEPENDS: --prefer-binary numpy scipy matplotlib h5py pydicom - PYTHON: C:\Python35 - PYTHON: C:\Python35-x64 - PYTHON: C:\Python35-x64 From 448e073e62510e55621f3548c4d4f4abd6b17689 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 10 Sep 2019 10:42:23 +0100 Subject: [PATCH 248/689] Update nibabel/gifti/gifti.py Co-Authored-By: Chris Markiewicz --- nibabel/gifti/gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 5b341ddfc9..f2219ce38c 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -694,7 +694,7 @@ def agg_data(self, intent_code=None): Returns ------- - all_data : tuple of ndarray or ndarray + tuple of ndarrays or ndarray If the input is a tuple, the returned tuple will match the order. """ From 15d1dd73c97502b68b5964a3ac7adcc1a65f1470 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 10 Sep 2019 10:42:34 +0100 Subject: [PATCH 249/689] Update nibabel/gifti/gifti.py Co-Authored-By: Chris Markiewicz --- nibabel/gifti/gifti.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index f2219ce38c..52c25c24e2 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -690,7 +690,6 @@ def agg_data(self, intent_code=None): ---------- intent_code : None, string, integer or tuple of string, optional Intent code, or string describing code. 
- Accept tuple that contains multiple intents to specify the order. Returns ------- From 85f65a4abbcdd0fa12956b0d539aff5c38a1154c Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 10 Sep 2019 10:42:42 +0100 Subject: [PATCH 250/689] Update nibabel/gifti/gifti.py Co-Authored-By: Chris Markiewicz --- nibabel/gifti/gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 52c25c24e2..f7b2412e29 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -689,7 +689,7 @@ def agg_data(self, intent_code=None): Parameters ---------- intent_code : None, string, integer or tuple of string, optional - Intent code, or string describing code. + code(s) specifying nifti intent Returns ------- From de8f0281e3a27a3c4bc17f911463ae2394220f61 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 10 Sep 2019 10:42:52 +0100 Subject: [PATCH 251/689] Update nibabel/gifti/gifti.py Co-Authored-By: Chris Markiewicz --- nibabel/gifti/gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index f7b2412e29..7c465370a4 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -688,7 +688,7 @@ def agg_data(self, intent_code=None): Parameters ---------- - intent_code : None, string, integer or tuple of string, optional + intent_code : None, string, integer or tuple of strings or integers, optional code(s) specifying nifti intent Returns From 4127cfccd495e856f3b39883fffa896f411fb6f7 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 10 Sep 2019 10:00:07 -0400 Subject: [PATCH 252/689] RF/TEST: Move h5py compatibility hacks into a private module --- nibabel/_h5py_compat.py | 12 ++++++++ nibabel/minc2.py | 13 +-------- nibabel/tests/test_h5py_compat.py | 44 ++++++++++++++++++++++++++++++ nibabel/tests/test_image_api.py | 2 +- nibabel/tests/test_imageclasses.py | 2 +- nibabel/tests/test_minc2.py | 3 +- nibabel/tests/test_minc2_data.py | 2 +- nibabel/tests/test_proxy_api.py | 2 +- 8 files changed, 63 insertions(+), 17 deletions(-) create mode 100644 nibabel/_h5py_compat.py create mode 100644 nibabel/tests/test_h5py_compat.py diff --git a/nibabel/_h5py_compat.py b/nibabel/_h5py_compat.py new file mode 100644 index 0000000000..2c0b0eb2c0 --- /dev/null +++ b/nibabel/_h5py_compat.py @@ -0,0 +1,12 @@ +import sys +import os +from .optpkg import optional_package + +# PY35: A bug affected Windows installations of h5py in Python3 versions <3.6 +# due to random dictionary ordering, causing float64 data arrays to sometimes be +# loaded as longdouble (also 64 bit on Windows). This caused stochastic failures +# to correctly handle data caches, and possibly other subtle bugs we never +# caught. This was fixed in h5py 2.10. +# Please see https://github.com/nipy/nibabel/issues/665 for details. +min_h5py = '2.10' if os.name == 'nt' and (3,) <= sys.version_info < (3, 6) else None +h5py, have_h5py, setup_module = optional_package('h5py', min_version=min_h5py) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index edf553de2e..37821409c4 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -25,21 +25,10 @@ mincstats my_funny.mnc """ -import sys -import os import numpy as np from .keywordonly import kw_only_meth -from .optpkg import optional_package - -# PY35: A bug affected Windows installations of h5py in Python3 versions <3.6 -# due to random dictionary ordering, causing float64 data arrays to sometimes be -# loaded as longdouble (also 64 bit on Windows). 
This caused stochastic failures -# to correctly handle data caches, and possibly other subtle bugs we never -# caught. This was fixed in h5py 2.10. -# Please see https://github.com/nipy/nibabel/issues/665 for details. -min_h5py = '2.10' if os.name == 'nt' and (3,) <= sys.version_info < (3, 6) else None -h5py, have_h5py, setup_module = optional_package('h5py', min_version=min_h5py) +from ._h5py_compat import h5py from .minc1 import Minc1File, MincHeader, Minc1Image, MincError diff --git a/nibabel/tests/test_h5py_compat.py b/nibabel/tests/test_h5py_compat.py new file mode 100644 index 0000000000..26d70b6e55 --- /dev/null +++ b/nibabel/tests/test_h5py_compat.py @@ -0,0 +1,44 @@ +""" +These tests are almost certainly overkill, but serve to verify that +the behavior of _h5py_compat is pass-through in all but a small set of +well-defined cases +""" +import sys +import os +from distutils.version import LooseVersion +import numpy as np + +from ..optpkg import optional_package +from .. import _h5py_compat as compat +from ..testing import assert_equal, assert_true, assert_false, assert_not_equal + +h5py, have_h5py, _ = optional_package('h5py') + + +def test_optpkg_equivalence(): + # No effect on Linux/OSX + if os.name == 'posix': + assert_equal(have_h5py, compat.have_h5py) + # No effect on Python 2.7 or 3.6+ + if sys.version_info >= (3, 6) or sys.version_info < (3,): + assert_equal(have_h5py, compat.have_h5py) + # Available in a strict subset of cases + if not have_h5py: + assert_false(compat.have_h5py) + # Available when version is high enough + elif LooseVersion(h5py.__version__) >= '2.10': + assert_true(compat.have_h5py) + + +def test_disabled_h5py_cases(): + # On mismatch + if have_h5py and not compat.have_h5py: + # Recapitulate min_h5py conditions from _h5py_compat + assert_equal(os.name, 'nt') + assert_true((3,) <= sys.version_info < (3, 6)) + assert_true(LooseVersion(h5py.__version__) < '2.10') + # Verify that the root cause is present + # If any tests fail, they will likely be these, so they may be + # ill-advised... + assert_equal(str(np.longdouble), str(np.float64)) + assert_not_equal(np.longdouble, np.float64) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 169660cc90..9df362e657 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -34,7 +34,7 @@ from ..optpkg import optional_package _, have_scipy, _ = optional_package('scipy') -from ..minc2 import have_h5py +from .._h5py_compat import have_h5py from .. import (AnalyzeImage, Spm99AnalyzeImage, Spm2AnalyzeImage, Nifti1Pair, Nifti1Image, Nifti2Pair, Nifti2Image, diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 5060e23e05..12232c42e4 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -10,7 +10,7 @@ from nibabel.analyze import AnalyzeImage from nibabel.nifti1 import Nifti1Image from nibabel.nifti2 import Nifti2Image -from ..minc2 import have_h5py +from .._h5py_compat import have_h5py from nibabel import imageclasses from nibabel.imageclasses import spatial_axes_first, class_map, ext_map diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index 23eff702b3..93bff42a47 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -13,7 +13,8 @@ import numpy as np from .. 
import minc2 -from ..minc2 import Minc2File, Minc2Image, h5py, have_h5py, setup_module +from ..minc2 import Minc2File, Minc2Image +from .._h5py_compat import h5py, have_h5py, setup_module from nose.tools import (assert_true, assert_equal, assert_false, assert_raises) diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index 8b6fdfb78f..d5b19fe9ce 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -15,7 +15,7 @@ import numpy as np -from ..minc2 import h5py, have_h5py, setup_module +from .._h5py_compat import h5py, have_h5py, setup_module from .nibabel_data import get_nibabel_data, needs_nibabel_data from .. import load as top_load, Nifti1Image diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 1e329ae230..074f2de5b2 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -46,7 +46,7 @@ from .. import minc1 from ..externals.netcdf import netcdf_file from .. import minc2 -from ..minc2 import h5py, have_h5py +from .._h5py_compat import h5py, have_h5py from .. import ecat from .. import parrec From d9463a274c83c420f203a24261dbf8871a6efd3f Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 23 Aug 2019 13:26:18 -0400 Subject: [PATCH 253/689] ENH: deprecate Wrapper.get_affine - use affine property To stay inline with the regular interfaces --- nibabel/nicom/dicomwrappers.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 14a92e1c5f..367a846dcb 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -23,6 +23,7 @@ from ..openers import ImageOpener from ..onetime import setattr_on_read as one_time from ..pydicom_compat import tag_for_keyword, Sequence +from ..deprecated import deprecate_with_version class WrapperError(Exception): @@ -286,12 +287,16 @@ def get(self, key, default=None): """ Get values from underlying dicom data """ return self.dcm_data.get(key, default) + @deprecate_with_version('get_affine method is deprecated.\n' + 'Please use the ``img.affine`` property ' + 'instead.', + '2.5.1', '4.0') def get_affine(self): - """ Return mapping between voxel and DICOM coordinate system + return self.affine - Parameters - ---------- - None + @property + def affine(self): + """ Mapping between voxel and DICOM coordinate system Returns ------- From 0932c6d118e89f97a6327d61724e79bcae5c934b Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 10 Sep 2019 21:20:41 -0400 Subject: [PATCH 254/689] DOC: Update docstrings to describe affine property --- nibabel/nicom/dicomwrappers.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 367a846dcb..bd44e958ae 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -96,7 +96,7 @@ class Wrapper(object): Methods: - * get_affine() + * get_affine() (deprecated, use affine property instead) * get_data() * get_pixel_array() * is_same_series(other) @@ -105,6 +105,7 @@ class Wrapper(object): Attributes and things that look like attributes: + * affine : (4, 4) array * dcm_data : object * image_shape : tuple * image_orient_patient : (3,2) array @@ -298,11 +299,8 @@ def get_affine(self): def affine(self): """ Mapping between voxel and DICOM coordinate system - Returns - ------- - aff : (4,4) affine - Affine giving transformation between voxels in data array and - mm in the DICOM patient coordinate system. + (4, 4) affine matrix giving transformation between voxels in data array + and mm in the DICOM patient coordinate system. """ # rotation matrix already accounts for the ij transpose in the # DICOM image orientation patient transform. So. column 0 is From 920eefa4cd421a5d46a041149f457dd71c5e7d86 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 11 Sep 2019 10:15:19 -0400 Subject: [PATCH 255/689] CI: Add expected failing test to Azure --- .azure-pipelines/windows.yml | 6 +++++- azure-pipelines.yml | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index ad1b72eb77..0cda80d6ee 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -10,6 +10,7 @@ jobs: vmImage: ${{ parameters.vmImage }} variables: EXTRA_WHEELS: "https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" + DEPENDS: numpy scipy matplotlib h5py pydicom strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} @@ -20,11 +21,14 @@ jobs: versionSpec: '$(PYTHON_VERSION)' addToPath: true architecture: '$(PYTHON_ARCH)' + - script: | + echo %PYTHONHASHSEED% + displayName: 'Display hash seed' - script: | python -m pip install --upgrade pip setuptools>=30.3.0 wheel displayName: 'Update build tools' - script: | - python -m pip install --find-links %EXTRA_WHEELS% numpy scipy matplotlib h5py pydicom + python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% python -m pip install nose mock coverage codecov displayName: 'Install dependencies' - script: | diff --git a/azure-pipelines.yml b/azure-pipelines.yml index cb2612d5c3..f595ec35b7 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,6 +11,11 @@ jobs: py35-x64: PYTHON_VERSION: '3.5' PYTHON_ARCH: 'x64' + py35-h5py-check: + PYTHON_VERSION: '3.5' + PYTHON_ARCH: 'x64' + PYTHONHASHSEED: 283137131 + DEPENDS: "h5py==2.9.0" py36-x86: PYTHON_VERSION: '3.6' PYTHON_ARCH: 'x86' From 154321c05d8d6c1df048ad9ded6d07c5aee62923 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Thu, 12 Sep 2019 10:36:40 +0100 Subject: [PATCH 256/689] doc string draft 1, need to finish the examples --- nibabel/gifti/gifti.py | 60 +++++++++++++++++++++++++++++++++--------- 1 file changed, 47 insertions(+), 13 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 7c465370a4..ebe648744e 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -682,9 +682,46 @@ def 
get_arrays_from_intent(self, intent): def agg_data(self, intent_code=None): """ - Retrun a numpy arrary of aggregated GiftiDataArray of the same intent code - or - Retrun GiftiDataArray in tuples for surface files + Aggregate GIFTI data arrays into an ndarray or tuple of ndarray + + In the general case, the numpy data array is extracted from each ``GiftiDataArray`` + object and returned in a ``tuple``, in the order they are found in the GIFTI image. + + If all ``GiftiDataArray``s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``), + then the data arrays are concatenated as columns, producing a vertex-by-time array. + If an ``intent_code`` is passed, data arrays are filtered by the selected intents, + before being aggregated. + This may be useful for images containing several intents, or ensuring an expected + data type in an image of uncertain provenance. + If ``intent_code`` is a ``tuple``, then a ``tuple`` will be returned with the result of + ``agg_data`` for each element, in order. + This may be useful for ensuring that expected data arrives in a consistent order. + + Examples: + >>> import nibabel as nib + >>> gii_fname = # something/something.surf.gii + >>> gii_img = nib.load(gii_fname) + + When not passing anything to``intent_code`` + >>> gii_img.agg_data() + + When passig matching intend codes ``intent_code`` + >>> gii_img.agg_data('pointset') + + >>> gii_img.agg_data('triangle') + + >>> gii_img.agg_data('time series') + + When passing mismatching intent codes ``intent_code`` + >>> gii_img.agg_data('time series') + () # return a empty ``tuple`` + + When passing tuple ``intent_code`` + >>> gii_img.agg_data(('pointset', 'triangle')) + + >>> gii_img.agg_data(('triangle', 'pointset')) + + Parameters ---------- @@ -705,18 +742,15 @@ def agg_data(self, intent_code=None): darrays = self.darrays if intent_code is None else self.get_arrays_from_intent(intent_code) all_data = tuple(da.data for da in darrays) - all_intent = {da.intent for da in darrays} + all_intent = {intent_codes.niistring[da.intent] for da in darrays} - # Gifti files allows usually one or more data array of the same intent code - # surf.gii is a special case of having two data array of different intent code + if all_intent == {'NIFTI_INTENT_TIME_SERIES'}: # stack when the gifti is a timeseries + return np.column_stack(all_data) - if self.numDA > 1 and len(all_intent) == 1: - if all_intent== 'NIFTI_INTENT_TIME_SERIES': # stack when the gifti is a timeseries - return np.column_stack(all_data) - else: - return all_data - else: - return all_data + if len(all_data) == 1: + all_data = all_data[0] + + return all_data @deprecate_with_version( 'getArraysFromIntent method deprecated. ' From 0530484a4791779b3a58c52e0eaa70d2705168e5 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
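Even with the doctest examples still to be filled in, the reworked aggregation logic can be exercised on a synthetic image; a minimal sketch (array sizes are arbitrary, and in-memory construction stands in for loading a real GIFTI file):

    import numpy as np
    from nibabel.gifti import GiftiImage, GiftiDataArray

    # Ten time points over five "vertices"; every array carries intent 2001,
    # so agg_data() column-stacks them into a vertex-by-time (5, 10) array.
    darrays = [GiftiDataArray(np.random.rand(5).astype(np.float32),
                              intent='NIFTI_INTENT_TIME_SERIES')
               for _ in range(10)]
    img = GiftiImage(darrays=darrays)
    assert img.agg_data().shape == (5, 10)

    # Filtering by an intent that no data array matches yields an empty tuple.
    assert img.agg_data('pointset') == ()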
Markiewicz" Date: Thu, 12 Sep 2019 09:43:17 -0400 Subject: [PATCH 257/689] ENH: Add general test data retriever --- nibabel/testing/__init__.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 16f2112299..4deb26aafa 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -13,6 +13,7 @@ import os import sys import warnings +from pkg_resources import resource_filename from os.path import dirname, abspath, join as pjoin import numpy as np @@ -32,8 +33,23 @@ from six.moves import zip_longest + +def test_data(subdir=None, fname=None): + if subdir is None: + resource = 'tests/data' + elif subdir in ('gifti', 'nicom', 'externals'): + resource = '%s/tests/data' % subdir + else: + raise ValueError("Unknown test data directory: %s" % subdir) + + if fname is not None: + resource = os.path.join(resource, fname) + + return resource_filename('nibabel', resource) + + # set path to example data -data_path = abspath(pjoin(dirname(__file__), '..', 'tests', 'data')) +data_path = test_data() from .np_features import memmap_after_ufunc From 8c6cd7d9c221fd5d9b380157f181d1266b513703 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 12 Sep 2019 09:45:02 -0400 Subject: [PATCH 258/689] DOCTEST: Retreive a surface file using test_data --- nibabel/gifti/gifti.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index ebe648744e..ddf761d4b8 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -699,7 +699,8 @@ def agg_data(self, intent_code=None): Examples: >>> import nibabel as nib - >>> gii_fname = # something/something.surf.gii + >>> from nibabel.testing import test_data + >>> gii_fname = test_data('gifti', 'ascii.gii') >>> gii_img = nib.load(gii_fname) When not passing anything to``intent_code`` From c54b696f829e9d0d260e040b30d2eb546a03847a Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 12 Sep 2019 09:45:52 -0400 Subject: [PATCH 259/689] DATA: Add 10 time point time series GIFTI in fsaverage3 space --- ...ejudgment_space-fsaverage3_hemi-L.func.gii | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii diff --git a/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii b/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii new file mode 100644 index 0000000000..0814a6dd33 --- /dev/null +++ b/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii @@ -0,0 +1,33 @@ + + +UserNamemarkiewiczDateThu Sep 12 08:59:28 2019gifticlib-versiongifti library version 1.09, 28 June, 2010TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 1.000000eJx9lnlU1dUWx0EZLtwB7sC993dHLsgFCnxokJmFwPmQklQWiYpEvnhmapRTYkI8ns9ZK4ckRxwrhxWFE44kli8wNXAiyzEQQVSUUBBB3s2WrdUf7+1/ztl7n3X2WWd/v99zvh4ho8wtkDt9JZLz3fkxRcvh6062jDfj5rKN2x2k9+kSH3i3i/vJJ8WG77wo6G9kQ5OWr1Z68dNBG6XbDEy8auPxJ/SUPm2lxzVvKkptyHdbUAfbSIiSE3XbyLrzNr683Z2yMhN+OU5eSPHn6DSJnFCfh7VOfGNlwORZQrp0VJy/s0B4zv9NjFyrfJjLH+xHVIOOd4qCmJBvIjBVYke8HtV7Sirq/Mic3C4GLbBhlStZ7qlldoSJxFYtw7M1PDvfREChL9+ZNRzOMZJdG0hbgYKM5xpEpEc4v+WYGd5k4tI5Hd81+JOdamD5TgN7Z1qonGfkYJdEZZKe7vv8GZDtjbungZJUBf80mJgyxIdjP7szcYuKM9PVhISraf7Cyo4PPenp4cnQXDOJA0NIKnDwicqK7rCZpDYbQ1eZ2RXsxey3bRxYYeYuWuSuvXMXGtg/XscVlRLFDiepFhsxPUN4q6+F+qG+LPvewt2WQNZNsbFnrz+XhxtxLlKQ8uLXotBm41aRk1kekQSvfoy0QDv33wwk0s348A4fWeMFa8KYpuMie24Ib5zUEHs9gF49w/GcLNHhtV0UZ3SJHmU+FF++JYY7doiJmouidP4vorW8QyQVtIjc6ePE8I+/FQ9axwmto1g86H9VPJ7ow5qWH8W5I568mN8lVqo1RG9U0TPVg81OBS0jNopH9Wt7vC4O7Vzz0Lc2GRN+HyOaNPQp1JOnl9N87ZbQ+XWJZB8j+8IcxMRITBjtmq+z0jDISspYIy+/EkDscRUVz2p4qkZPlauv9QOsRA/WEanU0W+OivqbRrISA1hxTE3/RAWJ4wNoKPFk5Vpv4n7pEJPi2sTFGn/UWyVUFVaOLpZTuUrHxgYZ599z56zNyIhFMozVEl9ODqHngh6suOiHXu/DWYWd6sMa2p4zUqVz9STZwrlDrtoxZhqfMNC70kJtvYWhNSZil5hYOKhFZJfJ6C9peDJMycYUb9rn69gwTI9ip4bdZXr2TlQQn2Jmf1YQ6wtdmF1vJDpKwq9IyyufdGP8pjaxZ161mDpJRXmRCs1UGZu3O9kmGTnVHoh6diQDIxVU1xl4cpmNQn8TQd0cdPS28f49CUUvI6MCLGjqJDrfU+Obq2X5q3rEazp+OOTPiVoDuy9LvPZGMHGvWrhUYWaOi9va3ga+6Qwi4l8mRiyReOAVyLDFOuYv13H0qp7McisXoozc267k3EAt/4jQUZGl49IuA9M8NYze6ceUe3KCXHjo5uLXmGUGDGYd4bEmitO96V6soGC1isRGNbWxalLaAyh628LtdA0fz/NjyA96ZLs0jDkkMfKUN3n3FRi22ql8x4LXGQ1Sppz8hd0wXzMws0BDvb+CGW83iRUj1Yx37TW4S8HSUSqO91CQJvNn3iQjOXY/8tL1FAa7dK/JyqS+auKnS8waZWfcp67YCR13f7DQurBD5H3VKXx2yXgsTcXlXb4Y61087xXM4j42Gt+3s3CPg4yX7ISFh9KzRIeuTxDfLw8lON+J+zk9hi+MWJYa8MuQcOyX0By0477UyLGVJnrbJQ6fkXh3h6uHfXSczA8kqLedD1Y68F8ksS3TwPkeGsaF64mcrSGv1o1nUn3IG27G0mAjdbGdk99IrL1gZnG+hTsurtzGwr+vuc6xQMfCyd6Uv9whZAd9KXlTwel7MnJlKrYIG3NrJa7LTGRe1rFkrg/WfBVJz/iyNVRFeqiCNd39WDHaTjhW7C5tXpdhYfRpG1JHMI07Qoivl8jda6Rxi41+Lr1d0M31Lhw38WOImUETg+l82cHa0wYqB5l5YZmS0iwt4Xfl3C90wy3SwvO77PgVO5m5JxRlmROdZwjaGDufz3DgcPGxxmDnYKXE2Qot7S+ZGNUQwM0AJQNueuJj07LAV812pYP3XZg5p5HTuMSdMf28WG2vE+XVzcJ70xXRFWCn/gMn5accpHxoJV/j0o+TQZTWOWicG8qa5FCar4SxzRLCtKowHiTrmfKag9a2YMa5BZO528GxykCy9rn4s8rJ8xcdyMNsfG510G9sOLGax8k+3y6a6kI4v6YHs+Kkv2jt/7IbMwvEp049R9rUCY9iySVn/9TI2BQ7K5aYKbLY6ftMIDYvOyfuhRI230lco46fmvWsHxHA2LfU7M/pEA8eBHDlZzOzjjvwWx7M1b+biDtpZUZkKF+ol4qPCuwc2OYk73pRQsaGZcL9IzNvhG8SSRt9+b7Ki+L9D0RVlJb6CY3C1xBBuevtbLiu57D8iChaWyLS7taIQ5uMVPcPF84Di8RbA0+JvPP+SGc2ity0GnEwrU3M+XWdyMu082lmNIfc/Ggu9WewzJPLn80RITF3xKYId9a/60O6kJNg8kLb1Sw6on8S5qizYk2LLOHbntqE0/saBGcWiU9unBbxvzri
I0ZliSl/2yte/XZGgv3WMRHR0CVeV7qh8vClZnV3MqvdUR5oEdG2UyJF5UWOypPOd66LmfoO4X/DjRefqhN1Gd3ICagQvfQbxIScNlEVr2DkNTkxDaOFOa5TTHi6RZRUNYp7tR4MGVIlElbdFLfdr4qX6hpEU3ujWN9Lx7QLvlTF6mjx8aW22oTTqWZopy95W++KyRPviM82d4gLP3sxtp+Mdg9vWmuUGDbLmdym5X6DO+Nu/PGf2v30YfGbXsnY3sb/i5Nh3TPE9FbTX9aMn/iHPygrTiRu80DujBZTC5clzM5aLAzD6v7EUfrFMqEsSBP/WW3F+cpjCbv7F/+Z65t0If738Xk3IU4tOpIwvn6PmJoyS/wXIVx9fg==TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 1.000000eJx9lXlYFOQWxmFknRlmmH1jBpgBRM1dBKPE5vupYdy01JLETFNcyi1vF9NKUERLTFHkRkWmeFVIMTE0d8NwwS1xQc0ERcElIkFUZLtTPvU8/XHv+ec833e+5zvnOed93xP5wJPFBTbyzhnotbBN2Dqp+TUghDUjTbi57KVjVra90SaiOj8WecVnxLPjPdiboEed4s/8uRJefs+G5LqGghwLzlU63moyk/emDz2MVm6dNlPa30ImMnbl6kn70MbMaglF+UYe5jlIn6/EsN/ItTCfP3Klr7Gy9YVFom9UqfguYomQrb8nsibJ/4iN/FTO8gYV33e2of7OgMcMI6cDtbR+60tcsR9ftTSKkT4m4ifLKB2t5N00A9/fUFLuoYLuFpoCfDmXpSQ/XE9qoY3jfaTcH3dT9H4/jNQ8C61hJoaXabg+WEnHHXpq3fX0ibSyv1TPMV8TK1fryO6mYEO4J+uOaag/IuPHewY+6eMN59xoXO+Hqcyfo5cV/NbZwr4Id0Zs7MBKhYHgZAcVOYGkvWN2nU0MXBHAuQtGUtI8WDPWwpFTJrrVqvCb5MWdC3qKp2uYovZj88uhDHzLStyCEJ59YCHbV0r+BiPX37axe6CVi0vlNFTp+ChXzoxlW0VZs4XpDXYiU7tgfqUzZoOVmnMBbFut/6OHf9qh0WbnkgMnhcLHwdVqNbuTdVxVdmL21wbaD24Tg7Y2i8jXvFl2ok40rC4S8Z9cFmU1l8Wm55uFzKtBrBzWS0RPKxZ5R98QSxO/Efc61YgPpklp3HpK3H3Zg/71j0XuCQUh0/0YMbYD7y6V0lH5lfgzf33RGPHxlfVPzo1m5+/u0XJX7+q0JCz0Qd1aK6780iKGFusoCwugIlpDTZSWCUoLCw5bSDis58VyLV9sUlDQQ8P2bB0JVXqUhRbesfnz8wIls5QKtvUz4Nyj4UCEnNhoH4LuqrmUJeHLRx0Y6PNQhOY0imybkqGf6wmbaeDsEV8+WazifqgXsRdbhMcQHd1yPTkwUY/YGkzor3amPVTQr8yblCk2YqJcXLmk5+5gC74vmah/6M/5fWbGGnXUTjbTcXEAzfuMJL7qmu/pejH0OW9G1vqjVki58Lwne3JUGLpqMC7158J0HRMPS9mQbGLz/kDaupq4tVRP/R492dH+TOzpxk8/1Ym3blwQEbtk9HpajuaqJ2FJdkZ103FolRWltRN7XHM4NcvA6ZYAYjRm3EcFEumq5dtUPdcLDex2YbDbLDOrdf6Ms6vp+oOW7ls1SHMVbE/RE9Rfz9RzNvoVmgmMMbM8Q8fyW2p+9rCTWWRibbmB+2k2Lpk0HL6jIaW/jl2hVsbc1uEskPOFUHN/nZqKr9W0n9QRW6Zi5hUF64qkePf04+k8JWmJOura1CycasB/mBfvV8lYliKnfbM/v8gUXKrUsGyUibnZKvwDFcTN1vOjREuIxEzd2570HO/Sk7etLHxkZOIOJQULfOnRW8KZMzo+H6Fm1Wty4h7VivtH/Hk4VMfooXJ0cj8K90mZMtGlDV2MxJdIeTdZS98IHeMSzTTf8mNNVwPrb1hpWKFm+AwN7rfNfOx8JJJPPRbxNi9OZchQ5vjy/HsqVjcG8fQIKxOn2bjTPZg5FVa6O8L46J6OythgBo0NpaTRgY9cy5wgPSse6VjcbMLdhWn9U4EM2uvC1loTvb80Io0zsednF/6GqAg7bGXgRTNXVVYyfA3MnaPj5HgFdS6OJm1X8qanG8dTfWkY79KSfCsvHbSxS28iNslCXIaZZNfsJ9dYqLNrGZehYYHcm8z0FhFplLJkn4yYGd40fC7nhMxV+w0jXhIT8zpp+XC2FwN6+XH8jJQDX8kIz3L10IX1/AIrlbPNTPo4gO2JAbwTYiVgrYsLFSEEzTaS3mZg/xAby4aYSNhp5FODiaR2E5LddlbMDMYx0sCB/WYSVDLK3bV06iBneLobX48w80wfK9fcQiguCcVkDWXTd3bCC62k1QRizzQzLyMAww4jWQ9UnJhgZOY/NETO9cUj1oOSPiqSpEpeKbUSWaFky0Ep8yrbxYjdHiwprhKPH/8m4o9XCe/rLh6ctXM6OpApwRbuzQ1g0JFAUvrZcNQ6+LTJgbMllMjRDhRTwjnQWceZkkCOfWknbLCDNWeCaXEE4fOMkc3NoUjyApGdtbB5mI1iZxiPnwrn5rwm8Z9tDoY77MQvMv1Na/+XbUzOEvkTdDT09Hf+eTdm3IW/NPLFowFsaTViz3P1QB5ItStftV8YS3MdnFRqef19Hd9011GeqeZiTauInaxl9AQL/1oXTOBVO+vnm0gNszH/aCgtstWircSKtzmU0g8KnNot/xbZia59nbNBRHeRIs3w4HTfZrEnSU1G201hLe/M+TEaEiZoiaj8QfjZdohrn1WIio0GVkSECYk6XeSkl4m17kpazq0XzrZK8cXuJhE1aK249qqVw116UTlMySgvF9everBl7iJxqeMDMcHPjXuve2MMkjE625PbLzSI/nvPiz7h5aL9NXfn4PD453ZduSXK9qaK6l5nRcPs+wPyz78uSmJ2in6qfzpb60pF9u02Mah/q5jRz5cVxzvQUy3B1lYvVraeFpYBnmxf5dqpbbfFxkPNoravO7KqmyLnsYSzk46KVzNyRe3UR2JttYy94XLs0kQRoWwRsyQNYsCLd8XFMR4c2nJK1HS5I1qnVgnf3rfE0X01YuxlFZmf+VCcoiIlyZs5BQYiQlz75RUf7o6sFxfDGsX5TU3imxmeZDV5cyfEm9auflxRyzjk2hFR990orH6Ci8yoEuEZLaeoTvN/cdI+LEHE7Tb+7c1PB5/8sfhkjJik7cC1p3qIK4uynN3aVoolVTV/4Shn0QFxuH6UuDkngNzKjs6usdv/ilkKrz73u//+1xhRU1bqLFq4U+yULBT/BXpBi4Y=TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFT
I_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 1.000000eJx9lXlU1XUaxlnkXuBeuJe77xcviwopopIJGMr3kzo2phgOZW5lueGWa2Ux5a5TbkUCepIQslwSBMncN3BUnDRFyC0PIGiCLJIKhMxPZ+qc/ph5/3nPdznnPe/zPs/zGifImL7Yzs6VRnInPRK7pmioPuOi8QMzbk/C1ZlnP+gQcVNaxAbvH8XEGV7sT9XTmqbm+x86RPY3Tj5fpmO6m413h+oRlRby93mTlGXjxZ0WDtaYiRvjQ9c4A9cf2jk514NpYRaqGoKY3EPF82Emlod6P611pcVG8JxlYsimsyK1bLlIqW8QiTmKp2+xUg6fpiHZ18Enz5hpCDXhM0bHrb5ybv+s5OrI+6KPxoTmIwV+y1TMKjaydrY/HQfVzDNYac2U41OloibeSEKSg/Djvhy11wivymAitlspftXCmFotO83+5JQYmL3XwIn+DqozjIRNNDP6gIFR15Q0RXTi0VQNh3cpOT7LyIAoGQZ/d7qrlLxYqCKyXknJm1ambnwo/Nd58m2Bgeh+LmonOoldZ+bcARNl6204V1gI2+rJtHNWemZb+Px4ANZAGavzDHyyXEtcmhLftBDaz9pJt4fylztW/K96c/QLE6N32wn5zI7veF+OddPzaa6Ciwd3iUHP2Zhz2kViRzeOnQ6jvMpO3Cwbb5w3/GeW/w1lmTV+9/wSsWRlENt/0LCiRos+pyujs81cu/2taF3eJjJOe/NO+D0xsHuBGP/VVeFfcEW4FG2ivKZJ1C7oLjKTjojESeNEcVauKHunSsxZ5UON/Qex5KwnxwseiQf1foxa7YdXuCftGb5EXt8sfq9feGe0GPT6F0/PKS+Y45/kbTY1Yef1HNJ68+bGOrGpe4fo3aZHXm3jyr80XLikRd3JzpHVNl6rN1J8UUf0OhUjhIbUMzp+2WNg/yUziTo1I1/wY/1NfxalGEnW68nfKSf+JRnNw3Q80+hG6wZ3RuyqF4rKRpGQ5o8lxsDdTXpC07x5nBXAqGkyNo5vF3ENOkpLPLFGmjjY14nsAxcJQ6SeRstJHOykX76GiPsGOp+2ktXHSsJ+DUuWWYlQ6lEk2KhV2zk318TS+Sa++LxRLI6Ws35vAJvl3kRf8qR0soaZgzWkazWMrdDz0xxf4n4zM+qYgy1VZsLvGWgaZ6A2MQD5mHax9EiteCbqomhRKEmd7Ufwr568dySQorV6xo21IwK78leTL9cHGHn7sZ3SqRZW5QWyPdnK0RQDu/KNxNy0ULHXwswrKmIPqDl8SUfQWg1/k+ZV1aLnmz4GJoY4mPialW6Slssk/JIOaBl208WEECuBs82sz3NwO02L+1EdKfl62rPtfJ2t57kAP/yPaYmVaVBptGy0GZjZU03Ze/5UpvjySJqL90Y1q5boyEnRMHy8kfJSGR+e9UFepCBluJpFHyqJKdLQ8L6J2MoAqo/6MVnC4NOJEk/zTVyc4E75fl8ejnHy1ssmwh4riVwiJ3KRG3n5BhZ8rObyJiXl6XWiLUvNjA4t3TcpKL6hYFuUL0PcVGz5SdJYTxn2Phq6FusZLLfx3Xk/tkw2IIuyUXJAx7BZKgavMfNwxgMRM6hN1G3yYnqED3/fISc33Y9qWSBrHtj4uNzOtqEOhl2wUbgvmLYmHWuuOrnwYjBH1rrIk/p87yfJF3ca+LDQRK9cPQtzneTe0LJ4npnAaSaWDTKRWGfA9r2aT5RObhy0ETDXxqllRjKLdPQ/r+DIYh07+qm4HPmbyF7h85TrBZIeQg86sHWysOW2hfoGC3cNRq5IXvGsm57YSVrSDsvIOdAmZg/yoV+lgo6tMlLOKBkjzWxHkgnrajP9Nkjakkv68PUj8bAvGRJWzjIl2joV57o52D/MRnuJxGWpx/Hv2/GIcnG3IRhnqYHXA0wcSnbw8UwLzT4SN0ZY2BZvoYcymLOFQWz5p5GICGkPyBXEdWg4JBR8NcaN+QtNNOXY+GVuMKv2hJATFkJcchDBrTZkd5yMHG9mc4qVilgjGatV9LlmIGiDmntD5QTIPei/XUXnEn+67LZiVKp4ab83vd7qEGMzPcloqRCZ2+pFwckK0XLeRum+zjSec/DKIiu3i20ICeOcVXaca4JYLdWtfyGEIVOCGHelC/Oj9eRlOumf4qJhdxAzpRn2XhHIiftGonuEMDjKiWurjRzJWycZQ+mxritNX7cKR1EwhyqDqFlr/JPX/q/wUaeK3iF6pq1Tx/9+d/jqpT88Mn24g97NJtyP23n/pJNBexwsvRWCzzypxkgNMWEGEgolDWZLejK1itQ8HTZJr48dnRmd7uLLFjM30+wMzgqBfqmiqIuDmOshDAvZHb/wrTQRu0PCzT9bpPb3oa+0097e3C5ed2j46EaNUHUO593hWirTJG0sLxanGgtEyms/i4QqA0muLiJj3ypxS/6jSKhXUXRnqzAnVYjC2BYxz+NL8U2djZizvZm5SE2n8SqO9fQia+tSMX/vA1Hv4U7jKRlzUxR0kXbd7rYm0f2XUtHlzTIxZGXLwPLzowZGb64WGzcvFcp/XBT20PoByeGvCmPMPtF8d1b8iBNnxGczOoRrVrsoneLDvQGd8O/qwcmoZvHG4gsiOcWLoqGdSPK/I2qlXXV5kBtaQ5W4tMOD6qmnhbkxS+w1t4jDPfzoG67EI3KSOPhyq4i42SBC19wWLVukf8HnxGxnlUi+dl00/VYtMnvcEl2VAfRaISfjsprgITIOzzQSkOnHK9/JGeuoF25bmkWvX5vF/QlezF/hTZy3HHeXH7u2K1B317JA687FHNNTXiyYekL41clJX6j9vzyJ/Hq0ML9j/tOf7ZLGn+Tnn+svKoI9Gbijp9ijTYs/lbFOnK6t+INHi5OOCr3uFWEJsJGe1i3+zsu5f7zFvH114JN8Jz1WbJ5eHL/w/l6RvvIj8W/5soIOTimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lnlY1VUax9kv3IXL3X53+d2F5SIhrokgTQZyPpk6pam4pk6ZuxL5pES4jQs6WZaTgrmgZFnpoGSahiOYmDouKOrkbooibgnu5kVgftWTz/THzPvPec55n+e87znP9/s5Z8OMYPbJHl64amVNpU9cn2tkR3kMg/zs+Clx+UQ0DSnNIv6zBuE2HRHqLcG8+66F2UkGEi41ifNr3cyebmLcA5myLhYujHAwPCqM6g0uKoJkVM12pvQLxRBrZdsdD3PLAxh+3k7f+1EMejuc1Ek2EitCf621a7qLiUVzROLI/WJXTZ7ofOyWyHdrf82d+FhD6YcmZrzpQvNnOzkBNqrumxm9VcXljjquVN0VozNshDq0lOXrcU61ET1GT8QXBrS7HUzerKJuUQRziiUu3XDxcyc122NrRacfvJQedOCf4uC7ADNyoZ64col+jRYm+Ll5zmXD2cPBkAlW3JZwxhQGMWqJiVvHtWSm22mzLoSqHD88M7UcSIpg3m0d9xKcjO7kE4sTA+iZKfEPSzSWVm6cQxwkdrVTNdXFms42fBlBjEt0smmiTItZBrLSQ6hcL7H+RRNvm8IJK46Ftm5Etxb0q5HxdFbTYbnEwUIXB20ulsWrOeORuHdVwyHtejE8yUVihJdtc1vyUV08d2Y4OZPsJE65F7//igQhp5/MOyimtPXy3jkjfVuY2Vf9FMOn2cn7sURse7pBTNwVSviDOpFatFH8af0J0TrrtCjJ8ok9Pe+I0B6thWvAd6Jt2DBRXl4iUufUiOk3w3iYfFjULQvkh5E/C/duHSV7dfy8OYBZ/dQUfFwofq//QkZ/saH3yl/nPR/b0n8Zh2/Tc/EbM42vqpi8uk70m90kRK6Fg2VOvP3NFJ8yUbLISf0JJ7e0Nq7vMnPfq+fESSOyU2JoopWFm2TGFkSQtEBHZbdwBjdZ6Vpr4p3loVREqvhAY2Gsy5+dxwM41XBblDfdFe2vhrMsSWKAot2Kt8P4a5CBqA3BfJjaKD7tofQTF0jyGStLkj30SY5hem8d/dequNbbQ029kf61EoMbZTb3dyD1M7I9Qaa2s0TMRidNRU4ef26j1XIbwZduidNTVUQdMnD0LRV9Pwwi7YiRgpFGoocZiZwl8Xm2mo4umaD8SNoPsFPYKFGq6Dyrv4G0cY9FecoNUZRzVMzN0nAvR0vu5CCSekYSGWhhXbabBWlPkfKsmj6pVuK/cGGttWN7y8P7A5T+/Ky0uS3xUi+ZM1/aWfVQj/qnCPyOmbmeqfhXCudmGwnfXImmchdXrjlYXeHggwkWZpw18cyyaBL22RnVzU7tfTfnfSamjDBz1yvxcr2LLgMlpu/RcnKcmZNmE4NlM2klSh8LDEwYr6dpgIb5Nh3d2xnILbDQ9I6J5iuKN2JDWBinocmj5eaKCHYs1eG/2cyMPDvb40y4ksMZsl/CO9nEtVIHly4EskXx+IrFkagO2ChZEc6Xt0I59MCfl3Ot5A8z8M4BDf+eXS++nRdBUJmZzIUazp7TMECtoUOgnmcvWZEdKmbNNPKM4s8x82X6xoTzeQcrBx/JXFli5JHTwKSP7NzMuCeiCxtEx8+C2Ryj5q05Krr20nMuJ4qxtU6OrXAT0zZSYYULY24sOxNMTE6I5FxzLC+1jGVNlZFSlZX18620u2Cn7fsS3ZMiGbrIQtFzDuavs9HnoY1DE63kpRnZa3XT5qSDrOUuls61cbNUYmO2hjmzLEg1et7PfizkJDWDgh2szVA4sM/Dtet2OrZ2se2+jKXZxh6clBWbafejiezvVYRUNIrKqWEc266lukDx1yUd2UUubGo71WtsaFLMPCwNQV6m47XdGlqgJecrHc4MPb4yN+YbMtl3nJR2dbEOFztToqn5xItttYT80Erfd910UNhW8bydaX9z8Eq9wpG9Mfg/HYNtp0So4o1eCRrubjCycqmG+3f8OGq28fIOmW2V0QR+6+VLu5edt6MZUuXC296D5WsHt8/LrFY0kjVVz+kTEvHXI6jeHEJFlT8Te0RQ1hTOqR9lukfqufVpKCN2Notpw/x5duRFcXJsndh1uFo8rJDZuDCaCY9d5P/kYFKKjF4bScNSF90bvQxRGOjJjmVyXDS++Dgur7Pw9Cg3GdoYFppjKN4bxfdKP+MVzST7vFzQePAImXNb3DQ3tSDg5lOsnNkg2oXFYnXGMGP8H1n7v2JqXoEYXWRhdZ+I9N/X9qX+8ISRc+85SV1oI2GWi3Z9IjGddpPui6XlhRjKrpto/djCboOF4WUGJrfyidNBZrrXO6Amim61MQQvcnBloVs5ZywhrfLFi/4eOr/ZgqzDJenrtUuEd4rChNBPRXFtGG9kBzLwbINwZCre9L8qhha1pLzYSOUqC74Be8TrMVvE2i/Oi0cRyptVFCciD78n1MFHxLhJehrnfyY2VV8Qf8l7JPYsWSkeLHASEZLInHU6xmzWMakqiH82zhbezIfi1R1+DJ8Xwuv1agyfBNN64G2R/d5xMV99XBzV+rqs+jijy6qLtcJ8dbaYOvSI2JpzM23S4YFizqmtIjpkXPorb/xL9DM2C7/wx6JDXShBvYLZ+ZU/lbV3RPSJI6J+UzCFqcGMmXdNtB3jEyVKvUEXa8TzIwLpGbZXbK1eLSz7H4lRj7SktNZRvGeESB/iE/oR9WJkzlVRbAhk7YKDYtipapFnOSuycq+I3NgaIY00cOprFVJdhMLNYOJHSKTd1+K9q+JBpzrRfvw98Vquwon1wegqQxmaqWKxRkfUVg1vLjPy1RY/UtN++09l9v5etNkXyn7l3f1/+pi25RWRsv2PWkre9dse4TXPifixAaTmtxGvFSxJz6/6u+h2o/aJjp6ZWSGka4PF6EEy4RUJ6QcWlzzJXQ440+WXkfLOYtbovenbZn4jsl+aKf4Dc4iCdw==TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lnlY1VUexuHihQt34a6/372/u3CDCxIoipV7qPd8zFwKHXPJXEqxMFP0MRUVRcVlpDRt0qTcUscYVMQlx8xxS1NBUAlzyXRExUmJUKBUVOaO89Tz9MfM95/znOX5fs95z/u+50ydGUpoowfbO3Zub2wUDzPNZL7h48oFO0GBSPohmom3Hgu63xfDl5wWY/ap2VkqMberia2Jwbzyk4syrGQtd/F1oo399xVWL9QwcK0b+3tO7rRzsvpOBL5nZFb1iaI+NYTxqQoj5z/F30/p2dNW5uiCsCe15uo8fH53nqh8t1iM3DZfqKbUinPjdE/mciboWPq6hc8fuuk8wc71vTJXM63Edw3nSJqBfwTVi1EjHHx0TYe3ayQTy2X08yJxTjZxqJ+doV01nO9g5EimTKHbw4EdERxLviE65/k42EXBtN9BVnsLzVdFsr2NhGOgRL7kZlpgf8sXKtR2kEjeqMcwTk1xuJWDJTo6FjtoyAqjqnswqiIdj6YE8r9uIPiqk5e6PRLfFKmwtpfoNDSGohMeNqUr3HM7mIQLV4qDJYnNSK13kSM7aX/CROfuoUxaKbGh3oJ/j57MVrE0n+zh6rJYaoJdxBaGs+qelT6uKMJ7uijNjiCyyEbfDC1tSgvE4BQXyb1i2JGZgCwl8KDAzZcnXMw+Lz/B8LfImqr41/YuERkFMehLzDSbZ6XL9niGbwhwoEOhqCp4IHq5whm8tFactu0UiQPOi+Toi8If+0D0nlgnRu5PFG37HxRpqUPEhv7bRd7mG0J1QcMtd5mYdUfFwMR7wh9lYEkPPb5NIThrwtEOXyN+q3+p/RAxbP66J/3+3RT/f9o325tQldrwjNJQcuQnMb/tI9G9o4Qvx80RjY31dy3sCOA2THKxdrlM1zoLE22RjL1roovHxoedZaYNcrKl3EirCQYqxuqZUy0zUy9RMS2cWrWG67FWIruo+LBQRe7cOvFy6wYRO8JIReCO64MlntkdwalqE/Vlocw6+1hM9VqZbFWT2MlOr9FeZo2JIaFOjzRJQ9kODyU1Jra0sKN530mJw0VJvhl3b4WVeRYWhDj5+rFCpSSz/prM5kN1ImxnGCftZl7M0DAvU82lFRZU48ysuGdi2jqJgbcjyOmm8HxVFI39FfRXJKbvkxn7i4m+SY9FZdkt0WZshbhbokVaosf1XDNyjnlZprIxwechX3maYHsE3x2VeS3XRfefZOq+8VDxVye778i0iJOZmqNwMNhBrMVIwtsmLh+38HOwlVSrAX++jbvnJayLPLQKUpCvONg73saf51v5W59oLuQpeGMcFBPFL60thKZZSVsocaCZm/JaGw3tdbR+0UI7TUAfwkKnLRJntGZ6uCO57NWSP0ZPRJqRTT4bnX4wU7ldwtkmlInjIjhbpSX8kpFhTQYcD21UnVSwvmwmwhzJxnMS4QvMRLZyMqB5CDvOBzTu83JspYOyJCP1mRpmZKto9YXM7EQT0dVaDhTViL8EGWlfauGj21qC0nWsSYrgwZhIZmbYWTpDQ9FOC1cCmLSodfLwAwN1KTIL1ri4N9NKxmkz3xcotLY0iNOrG0XRJTWLayLou0RDqM9I5einuPSKB+PhgHemeYnr4KLF4jgaptvo0tdL5NlYnvs6Fvl9C2lHJK5EybRWOzB/LpP9i4f6Azaeba1wKtiOaqNMr3My7aLMKD08JBx28nGWB7vLzrYmiWKnnqafrQw4ZWSZ55HYmxrQ/TyF3D95SM6PIqe5gqOFiwmTFK567fTd7aS+c2D9Aitz08LY5X8o5h4OZ9D3WnzJYaR20lM40MXdfDvnAnsKn2rh5UFhLBxlwF2mJS9cj26fnt27InnUO1BjkYISyLmop5v6ajefDI5mYHkMa05KPLbb2RU4z9jAWRwHZEZJCq+/pBBniOG9lGj6PS2juBR2FEfgXxHgwldahirB5C6wMyCgm6xbXia97ePo9BhiVD6S09z07RFF1dNOvip24giRWTPIyAdvyGzMMTFsaCgDDwZT/auR7PMGts92sfqLSLbt1TD6+SaxPF2F+U6lSIivFbfVlWJ2QeBuC6LJ+dXDZy0V1hUpLLzoYU+lmy3DfWS86mNFk4+i7GjazmxOu3YBLxnt4ZtPo9k/9Snajvdy6lAUXxyW0c2K5ee4KNICPnNNHcBlXBzVHeN5q+y+8NT6WJ8SQ+5N6Q9e+7/iaNNHYpHWxvYXTP7fxm68WPG7R37wwMWpLx0M3uPi5lYP0zu6mTUiDovHx7BCC0avRO41KyMnm9GoGsUSYSWpRCG9fzTBGTF8JSvMXubmTmksk3XLRb9PPaxPj2Pvxm3+ghkrRcqPARwq1ouzAe21nKHihaUPxc7UAOen3BTHFyVSMtJCQrmN3FnHxNaOu8SvXS+LrZdkzvvjxbgbuaJyzhkxR2ck7rUN4p9XK8Wbx++LvJFrRaEz8OacfYYT5ZGU7zLQZ0Uznq3JEc0v14vnk4JIHxHGmmlaxk9RE3/1rlhU/Z1Qznwn9pobunXaNKhblblKXPxknkhff0ZUXa/u+mnTEDGm4x6xOSXdX1F4TFx//FgoixtFmUFDeSD3Yq2K9Gv1Iqn/GfGwnZrjEWpc7/wo6hY1ioS9QXRZd01Ub1eRaT8m4so2iG7e+2LEcR2eR1pWhb0pXrI9EP3SakXTxX+J6lIV38adFH0mXhXeC98Lw6EbouXJShH2iYmI7DA6Zhj5rE6NulYiPkhH8dAwGgbUCKO3QXgO1Iv3VqrRjw3n7P4w0loZuLlbh6nSzLuWYMrsyhNeLNUdEaNDdLwVbPm/PLmd/arI32z/w5p9z/43x/IhKeJM5xB6zmopNmUv9+ccXiakUVW/88iXd0jssb0meoa4WNIn3v+xbsfvczejfuj25K+V2VnMdR71x2btFru/nS3+De7VaiI=TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lXlUFdQWxgkv3MuduJc7z8AVEXIk7RlPCtg/Z5wzXw6JM46pTxPqqZWk5mwRiSmVPM05RU3JVHLKUkGx0ggshxwTJ1JB0HfzrVqrP97b/+xzzj5n77W+/e3vjF4Ryrxnveju2QhtXSsRcUY+SvdzO8dBUMDWZkWRNbdOFiTUSsnqUvmyMoTsPAsd7hqYHxlExwwXg1eYKBrl4sk9ZoJOOXEmq/iqnYdRA1zMmemi38cqchRW7i/24VobjGeYgz7johh2U8tIlZ30o6rHtcLWePCmzxbjM0fkramzZey3N+TgeO3jWLdsLd7OJko6e9kUY2fUMzZsCguLGqioXKjjpveOVGy1M6dew9HicLp3tJGTEM7mTkbmbrQxvqWK9I0GJje14TnqZW1nDWEf/iI7uvk52deJtZeDDtUmtFvC2f8PKy8ftbAq0436kJXnB9o5s8XC1ud15JsU+JqZqPpQS8MaG4pMJVmVQRxJ1DAAA4c76al7xUWxrlYGTwqm1UAr3Y9H4c7z8nNjJ8dX2tn9i4u6wHrEPQWHf3aRPNFJUUYEXSJDUbxqZfizJs7qtRT9FsOykx6eoCEdO7lwjAujZ3cT317xsPKom+jFYdxJtZB2T01x2nppcduJebOfz27Esa4ojvj1PlSpXpy9bY8x/MOeesWR2nNDiTRd6meB1cTUDmaWVMVSsMrBroebxJhfL+aFKlK23JB5tdvkQGy55OvLxVJdK0d+uyXPhbeQ0wv3yoExg2Tvwc0yJPmCfDFPxaUpJVL6eTCGtfelLE9P3+ta7BuD2XpIzenX8uWP+mZtf5mxZMXj/Um3K/V3/8rbBpr1tBAd4MLXITekbPwjufObheyRXo6cN7P4sJng59w4FW6eTrYxdLIJdQM9V7ca2dffzIwlFl444EA30Ej5WR2X5+v4V4mN0oFmeteoqLmvZFikmdYBNPccCmbW5tuSnX1H5qvDSW1nIzvcxrkXw9hywMg4k5L4px7K610sDJqmoFO5jXF7ffTPj2ZjZx2v5yrZONHDkjeMKLbZWHjORW2ciyXXjawsdJCgsnBc6aR9igtbkpUVs2yMMd2WoKNKZnxgwDdeyYIcBbHFESyJN9FnZ+BdEwtVGg0/BOZn2WUfbWY58WXYGBXgbsg3Rs5Oeijf7fxVHlwpkxi/htmNdEwdEsJnCVH0MVn4JtlL/qTGvBCr5lyKjaIvXGy+Zed2to/hX7mY+qSN+Ndt9GzvQt3XwezeAZ73M5JstvByvYmRLfX0PmRheYaVnn/zcqXSwZkODpamWBjymgmjOYq9Gx30sDg4cDnQk24mPppqZmw/K+2nu8mqNaMYqqXpvgi+GhuBtmkEZo2FOyfCiRqrp81SNYkB7Aa1DudKqZkydQQZi6yMbafgo5/CuFam4aHayLsT9HS7aWJeYwe1pRFM3abjqbZWBt0zMkLr4FFsA04WannzvBdfvZ3RQ/Qk71aRWxHMsu2BWZ1h4G6Ihv1Fv8rZSgPVWSaaT9NQOELD37uHMeqYjtOXrChzlDwsNFLyhpUenzvRPqunfK+VhW95qMo18WOugX6nHET0qpYLN2slJD2UgWvCaK1ToYgMx18VRa8OXta4vBS38GHMdaN5KYayYhPSwEdibAxnohqysEsEdW9aCS2xcG2Lg631NroEaszsamHCUDvrdwZmc5iNbYHas5xGsi4FsM9w07m5l9hHNrbrbFgPatG/Y2HLYAPDG9VLnVLNqIDWLVJ62JwQ6O+1gFb3DuiwysmNPCtPp7s5FpiPp/PMGKtDydzxQNqGq9m5SsPQ55RMb6IjeJ+LzCZ2lv9gp+vaCPLOhLL8Cx1zJ2noN0rLnFd1KC4GsN3v5laeE8NNN/0TPZzq6mX07kjaLPBz8N8WPq6ycv2il7RdDoJtdpZOd7K8nZO9SX4GK6NZPd1G5ntO3t6npvmGCMbP1XB5QhAzVlmJK3cy0BKJaYOfjX4/mWY/QX08tBniI/73njR082kgZ9K8cLRJNvqNNxC9OpSqgidYVWogcqaeRdFu7rUJZ0ClkvP6h5JoCKbljvOS8PcquVBwTlZmOfkhO4qO0R6unXew0+ege7qPtEIPZWNj2D85hldrG5KT78eXF8usY0YKk32M+SmK08HRSGI0rTyRdK2zM/nFRswcGok21kPScQ9S0QjjM3HkZD+QushAv11+7lz4q9b+Lxu39D3Z1NTCmFJj6h9nk5O++1MjF673sr6NizNL3agqfBQVe2iREcMb6/xUjTOzb5eF4iNmcj81Ujv0gcQbzTTPdpJri+bW836ajw78w+s9xMfFoN6ZI5vue3m4K4aK3Z+mFvR8Xx695MLeuUAqO2g4UBeMfdcDGbnbSOT2S5JRE0/HuSbmtrMyYeVBabFym+xQVMqcwD9Y1CpO3m48X8ZM/Fbaa4z8U7laJh25ILpLNZJVky8NT7jJdiSwf7aOJV/qaTxDQc3BbJlWf1uaJz2StGZKCtI1aNJCmB59W0Zf/V6uek7Jug9qUtb17puyruCiuFLeEmvISWlzqSp5casBMviTIknbPSF1wbSvJa3/I5nyTp2snaKi32IFHX8JImxZtVR0Oyk9moVQ0kPB3dOXJK97jRzeG0TiuAsydWSAL97DsmtBgVRcuC/mgB68tF7D9D3DZWWTGom23JAFmZel8G4wr/14TNz6s/K9u0J29rooH0w5LxN/NJBYGMr+Hka+/DyEsBNWDNd1VKWraBl2Qx40q5Y9X1dL0OQQkotUlAe4OHeDDvVdDd0VETTs8gTabc7HvJg57aAYPlGz2Gr+vzxx9x4gU+46/nJHE/rfHGnr2kqPNQ0Y4WguTUpyU8uvvyvr4q/+yaOyRftk0/YBctbr4nB9XOqJXVv+jOner0j53be711bWTDqUuqPxDuk6bab8B26Rb6I=TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lXlU1dUWx+HKfAe43OF37+/+uBcuMmg+xzKHUOF81CzNMk1SfKZPTTJzfJqm5pRiRpJCKhpQSs6JoQ9xpBTNBYo4hE/R5xyooAwaksj75Vq1Vn+8t//Z65x91tnfs/d3f8/xdT5kzHBhjrTx4rUnYn6zkRU9wxntsuOh2pQMF1lLmsS28l9Fys8l4ni2N7sarFyJDyapryedshTuyWa81ytEXDITbZcZqfjx/D2Fsm0ygzY66HgzgB1rrPTo40RTpsG1wIbcGEYJeq6WS2im+T3L5Shx8n3iYvFF2yKh0S4R0z3vi6ZJ2mex2sM6cvuaSA5yktPNzulzEk8nWBgU78fgQzo+HfBQnF5rY2udlptJQazea2HcVwbmtDNS+r2dy4P8ePgokJS2NixeobTM0DKk+ZZYtrElyZ4OFiIzSn1H3uVADOutTJxlpb0phM2vWAk4ZGNDvoXJKt5zsjeDO5iJV/P0aWdn82EflNEepNToSFLv/80rkDZ7HQy+1yQybrcgYbrExl/dVOlDWbVDJmaAjXYdnIw7b+NpsRcL62QinHZEkZEXYr1pWGdlSCcTExQ9OwdHsCBR4Vp1OL80OzjWPwDfEyYij4Rw7AMnIxP9ubzSgm6ElkNLt4lLZxykrHRzpUtrJkS1pryvi5SeTg5nWZ/V8A9z+tvjOhSUiFv2cCqHm/i0hYVhG6L5cqpM//PfiaPhT0TrS37YYh+ILmt2C+dPF0Sv18rF+KuNYue5GlF25DlRohwWz0UPF8N/yBFZ226Ko84A7BNOidZ1Gp7GN4gF+/RkddYz5SsNNTlapmZ8Jf7If3z/2+Lgrsxn63mTlbjf/d03jZzXWhlZ5sdHOfeFv61ZPFxi5fMtTgpnWvg81szKWoUst8I7PSVWe5oIbjTwcrCRsYssyJkqH5bJ2KqCePGxgRVq77tflzhVaeZRnj/P3/dD01Wt62YNubktiE2uF7d714uXsgNJHSGhlFmJLvMnUTHxwWRfRGKT0N4z0zTMi0khNvpWuui5K4yR2XpOLfZl5w0nvgHBHHzNxhGh0D/WQdLaYB6o/P9soJUn6TJzoxSmZatYR0t4FNQJ61A/js030m+mL2l5XtT2CWZgqImA9sFUjLHy96AAimtkCo67mHpE5YSQKLojceB9IyGLnooT7apEuOWsOLdXS5sX9NQM8+Y7dxgnEi10DXCSnhpNXCst+ZMkZsxQWLjOxodDXSSEO3i5i8SW5RKTRjjU+ZS5eTGQlyYbmfuDhbuTzFRvNfDqZTPjIiXurHKy9lsHT1+xs7ezmaQVJoqeC2P5KTujYu1s7+Li7BUToXvMhDRYeEvNNeJzM7OG6qjQmNg4x0R6ejATT1jw8jOSnGpgXkAA42t1uK4HYg20sC8lmAwVUyezN5lxAax4rGXfkyDa1Bjwclq40dFOwtdBHL6jx73fyhG1ts6PZGap+rHPoWNkqZNHN6yMKTdwodYX9J4UmCTcP6n6NUXH1rVVYv0EI9+r2Md30lF1QEvyj/7MLDSQvF+iW40/wWNM1GRJzExQ2NBkoDRNYkduCGejzCxWexOU4uDslofihYxGcazIh4kWVTOO+dH1SiCrL4YRc9rFiPdclCaHkbRTIbdlFMGLzHT2DGVuVgSPZkcw80IwM0Il1u2w0rBbZs4bNjalO9HftTJnno3sBom5ZlVr91jptyIIV/sQtgxROLnVSe12iWteEj2+0BM128KatCCOzHoqWlX6U93ORqgrhLxv1XM5EkMy7IxcZUO/20pSvcJUu5nUHiYyh/pS0vqJyJnij/y+loNPfFhbriN6rANFttO+WKKst5lTeb5MKTAw9k0tO1StfViip36ZgaglCsUv2vEfp3DmdYUih5PBo0O5/qGbt85YaPrEimJwUnfHRnO+RGJvmYRlDoKmu9kUGkZ8o5X9BepMLA1geZgJt0bLgDUeZH9tZdA5mbRTYeyXWvLJwnBO57v57h2F/EQX05fKTCiRqVotcbQgkPRWErowI0VzfNjzrtrraUZeG6XnUpDC3XcDiSn1I+/SU1HYUcNbba6Lj2OrxYGLV8Wy6zIVPd1srg/h3VwZZ6FMWIWL7v9RMGyMIO/VCEbVReA9Ipzs6ig0KkaPr52c/DaMLf5ucg+GMXleGF1MNsZ8EMnuglCimxVGLXVRfyeSidtbse5Io7BfaMnaAjcrx0h/0dr/Zf3saeJ6oZmKB4Fxf+y1df38p0ZOzHRSqv4j5zUh/EOd7enRTs6LSH7p2pLwBDN1dlXzDBY2lRmxlT8Ra3PUvX/K1A538++GcLbOlHGdCiFoRgT97qWKsT85can412fsjNu3a7XwbaPg868s8TBSy9uuFgyzNwm3bzDl634RudtbEznbxNx0K+d6HRO9h+WK5SXlwqX+uS0crYTBuExExZwRH/c00m3+RnEh5aaYmPZYFCVniHxvJ9VdO0EXAx4xBvrXe1E8dbFIi38kKpd6MKnQh05JWkJ3+DAtoE5snVYmMp8vEzWWR7G3KgfFdii+LS5/sUh4TDstjh6u7PXDrXjRsmKPyC8eG3fz8HHRv1WzmDOmSfiO8qPtbG9ev+pJj811YsL1UpF53pshv3mh710hugY3ikvnPVj+5Q3xTVcNDDgu3iv+RnQb+lismqWj5yYdOT+OFbWHHotF66pFtqtCbDitoXfxSVHtdU2Mt5SLHRtuiyy/G0IMNLLgog+zY4IoDPShj1biYp4Or12+NE65L4bH1InSTvUivdobZbAfzZk+9G7Uc0yj48HPJroneCItlJ/xIqf7UeEeo8XdbPq/PBn4UYLY1Fn+y5nHqY5n653evUSbNi14MOlvYtDoNXFVvqli1rnKP3nUUFkg3ug4TPj0cxCfHx0XI+/8MzYqqTz2d//ZyRhhnlEYpz+wR2xunC/+C/e1hA0=TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lXlUlWUex7no5V64+/7elcsiEiCCORWuwfOxcMXURAwVscZKmjIV1ykTTXGrRnFtSFzTzF0csnHBLCU3MHfLDTE33JBFtrk5p87pj5nfOe95zrO9v+f9Pp/f9/3aoCDomIcvdBKO8gZxQ2Xku4BWqOfZ8fNF/AIv777eIGyj6kR1wwmRulxOwSwrfl8ZiM+Q0XmMm+QzJvJfdnMpxkLwaTtJJQqyJngYM9hOrNLJuFIlf1NaWbbMTZ9p/rz6RCLgXBgiUcONYRL+uxVPc7X+yINn4UwxNb1EZM2dIf66pVLMHqZ+Otdeo+H9OBPdrnoYNsnO4aM29hwzkyUPZJPQ8M6KKnGp0Y5Hr8GyWMcIs8TIWXo+zDdQf0viy9hABul0/FRn482KYNr3ULFnb4U4XRFOt41O1u6z433fTIdkHfeLrLzxsZXC7m4+Hmtl7SsSK3pb0HXRoO8gZ15XE9dSVfSusGOdpOBOph+RPdQY7ugZU6Ph7W+d+OX5MaWuBYvcNo4+G07ZYS/uUAf1Mjtr3nEzwGGnoLkF791zkJdrZ8clAx84A3AvsxK7ysjhPhq2TwznwEUn66+GoXa4uB4fSMZXVqozPDRUeRiYG0jMEAsrn1fRw7VRlBW72NImjFuqaDLaRbNsQzCRgz10nmN9quHvMUptT0prOiZqpoSzt9rEuPMW+ic+w6LnHKw0fi0y+jaKn3cqORtSKe7EF4qSuefEqV8viCm360XuFw/FI2OsEFf3ifCJQ8Uv2VvE6l3XReN7QTS9d1zcm9OSfadrRU6qluGTNcw55Y8UreLOrC/E7/lTyl4Tut0FT/vxHRxJv7XZwwysSbewYncg7TPuiTtnmsXEQivzozyoC81U9bLwcms3qbkuus+18VaMmaVtdJR/ZqBGZWHuRCttAxys62Vg8wItp3Zq2TtCousAM4WuILS9lPS9aKLTEH8KV/lzJuWRyMp6LDrt0fNpSx/7dVYO/SuIFvkm/jJVSUU3GTXhFlI/lTO7WEKWHoL8n6GcvKrhkFzJpVI3s3IMKHx7f77uJGW9k0ULjfT2aRixz8L5Xk6iOjohSmJ0msQPxioxtFDJya0GbmqVnBgsZ+th3/q/m5BdNXJ/o4V7ShXfHHdwrKuXH6Y7ULxgo9hP4ttaA/3yG8XkQb77OFwmQj9XsWK1huXJcrqMC0EqsjD4soe2xiimLQ5i8zYbP2a6ca+zY18SzGcLXdQg8cqzNuKHOXmtj53HbfT0+dHA7psWpGoziT7NihMsHBhoY44imOf6Ohj/wM7WZhOeUhPhpSGEJTh4rYtEbUMw+4+YiCo2kxlnpZ3TRfkMM6YENXEXjFT4m1g9y0irOjM9fLWoWq6hLjmI0TG+M7t0qHMtxBwxEJRp43qOnB17gri3V82qjgY2Nmup32EmR9iJjjOyq1LDzA0+xvVGtqscOKNb0N1fQ86KYHJvSKwfpiMrSon1gox7Ghstbhm4PFzN0NRKYXhWzzajkVE31dwdqSJtXSCutjqm+9m59mEg126amBtqJTLfSftYHf18377woovvfe/Z4KvHhZUOQmTV4uCUenFtUACtlgYRfVnJfIWBMc1eMnyan9jppnKUl8h1bmSTI3z5zKRO8PJvVWsKB4SjesFEwkIr7/5kJVlr543HNjpO9lK13sbp3hILzklcmSZxLtPKHKMB+343Sx85WZXtwXTGRt8SC/kttWCysKS7nk01jeI5UxAZHolpZW5yZrrRpkg8bLIzqtpO+H4r520u9gZaWP/QxIvDA8gyNIjqg4F8eVFFB4WCN0+oabjvICBHIvqhxMkqH38yBQnHNZQsC6LEp1f/bA3pt7WcG+9CPczBkMNOvNcdeOpcJP/gZc74MHo6fb5y1crjfDevnrUz2mojZYOD/Wm+50Yos31sqkutmHVOchOCeDjWxB6fb65J9mNNhERmopMRtWEsd7QisHM42u9CaY5xk3XLy/FPnVT193FVJBGZqGdAtI301QZGlgbgnCaj7zU9pYVa/tHDxWSvjl2xSh682SxmXpZx6toVsXbnXdGn6Irw7HbSa34Ig+56+HiEi0WVTlZHhVA03MPDuFbUWyLokB9BvyVh9C9uzflsM+/O95K+LhS/MaFQF0b6thASh9jRj2/Nk8shrHV4MOZ5mDEggo6/RCKLfCLamlrhlcL5fKD0J6/9X1H+UZ7Qfea70yn6pN/Hep47/YdHRg4J5q6vbo9tchF21sO4Qg/K3hGsbB9OuzZm4t620Hmxr3a9Br7PaxQzbpn5wMfxjyUhpGnCefWAnd5pHjZNi0BbkCdemBDMhAURpOduSfI/uESklrjok14gotqrkM/wZ1ZFvfjcZuDs1nKh9f0/vM+bubjRiib3oGhq2i6yl14Qt/tJ3MyOELcnzRZ1pjKRMENP9Yy14oK1XMi/qReDklaIAzEemuvbofDXk9VTz1Ehp3h9jvj6mxqRcsyPlSkKuivV9OwWwDTVI3Fy1CmxTXtGNBXVJhbH9EtsvndDJFVMF6mvl4lndtx58cqmNFHec5fY3XdUUo7isIid1CRe6tQoqqcGsu9RCy51kTH44EPh6X5SfPKJnMAlLcm49KuYMPaJmGOTce5CuVi+0p+F6w4J/cRVwpJcK7bHq6mZqsY7cKS4/HateGndfTHuwa8ipWtLXrYdFQVTrogpKT+L4sQKcbR7uQio0xM8PYCxb+nxby+ndq2VuUYNZy8qeFF+XzxRPBYUVYlXjsh5w1/J+0MVnF2mwVWm4ky9Cf0JGSPyHU+5MGgPinnbgsgcYv6/nJQHDBGdPM4/rRm9+b/90dbO4ss+LbB54kWPvMVJ1+blic4Xb/7BUUjlfhGXPlhs7eImUR2VVPvBtj/mCjafT/ytPXK7k/jpwaGk/gcKxWWf2/4HLgxx9A==TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lntUFVQWxgF5c7lw3+97eVxAFEmlosl8cX5OaZqgKSJZPkA0JzWVRDFSgdQSRjQfjZNSCkoqiYbKpJgkGCokRGoaiIaSKCIFgkgwN1u1Vn/M7LXO2muffdbZe+1vn2+fxTecWTfWhy/adHwys1vYz5CTdt2K+wQDdjZJL7JgUHSLEN+HYoNDtQie4kzeN2piUbDCzoH6GiPvNioYZzGS9qyK4gA9X8e4ElJsJvmslv+EG9kc7UbZWxoaJ5k5VehA8YdaXoryxaHLk7QoHYGnXB7HOnfOzJmUdJGwolykVKWLD+a1iOyPJY9931/x5PxeBTd1PlRY9MxZpkOfo0IZ4kbFi1IKzzwQM213DU+U8HGKN+WhGkq+8CbvmIwtCj1+u1zpCfNmUK2Wd1b6kDTGgxNLG8Rz/gHc8zdQ+1DHgUNK3tvmRfhBNfmeGtZkGnE8qmaKTI81RMWE3Z7seN+JXYuUrEqQIMbpqD3vQlRyr0j6SsIOiYxBtlwWlxsY5mbHQBxZd19F4SM/jo+2MKmvniV6PReGmvBv1ZEV5UhqmoE+Ljpiv5QTP8qZM9VqPJrkTFR5Ur7Yyv5hJjxXW9l6wUhlvRu5OWoeXDFjfN5MW7g7Q6Vquos9aM/fJy6+amTsTX9Edn8alwfzs8nCgTAz62rVj2v4h5QkGCK22V0QpfOtdB1W4O6moqBfPwK36Lg24IDoeLlHzOnrRkZni0iMPiQWZl4S+wb+IM5N7RK53/4sjrX3Fy/cPCnsEqeJG5kHRVBpgzjU6UbQT5Vix8k+eMQ9FN9FenGhyZOZx/qw6VsPjm7OFn/E31MbLXLzdjy2p+80Rfym339GxqxcNSe/dMPw0T2RPaBXhDarSbhuZv3rarIeqehoN5K10sjsbVrq1ikZe9aLpWFy6kxqguPUnJcZmJckp+GyF8vDpVw2aDn7kgqHExLuTnVjd7QC508duPyeI3VPtYmhp9vFiUxvBizX0jZHg5vRg4iBCm7NdmHBaTvG9VOR/J0TX0u06HN9eeO2H805UgYVu9IhsXA8X07zeQ37HhqomWVgppuC7FN6Fm1XsVRroP5fRnpWaEj6XEPexl/EmFmuKMfLyat3ITHGifmhCppaFJi+l3OgSI1Dmjtv/2DAONeHUfMNVNriHnlSi1OejND6HrHZ9Z4Y/FG1sHeU8LLt7Xwy3pmTy3z49xUVFc1mGvT9iH7FnamlGsIeGBlsw7M11cL+owaSorSMStTQmm5An65jwXAZr4xXMD1BzRsNSsYnS7ltUDMpVcNSVwslvXoGrdBT8q6ShbFKot70Za1GT3SkDv+FFiKtSnoHqPg4QcN2q5Ep25W0/kNCUaucYQkKDpTLkd1VM2OwjLFFUia0uBG+VkJQiBcKk4raezISCzWM+8WJ4Em2/pjvSedOGfOavSjaqGTzq3oqVss5PcaLf45XE/OrnB2j9Yye60jvl7Y4wRbuH9YSf8QLfbQr0R/YEx+n4mSWnKh6Ca+aW8TkW97UdMrIsdm5Fg+kh91YkuDF4GQdF7zdOfGOgrbtGp64bmDjj1Lq0rT4upp5P19FZoScu6UG1hx9IByvdYldGS6wzIOOYa58FuRFWaIva3zMjGwyE1Lqy4JKE6+FBiLrVHLEhkn+zgDqkq0UrVfwXKGaTV5qrIt0VM3R8rd5PsRf0bBXY8vjkpa3qrVUL9cQc8WbnOlmEp8wMeiBiTcP2vCaomZ9pA2bgUoqRshYvLBHpNS5khWnIyfdxJGLJoILtDZu1bOtWsexfWpWbzdisNXwVxtGhdedaSvoFluHu+PSV8KavS707ZDQ9IoB7LT8EqmlLETB1Uxn6uylbJnnQZ8rEj7f5km6i60mjibufKXl/CU9Gyfrqcs0UZ3iy60R/pRVqJm9VUPgTybaLupIGK1hXayetHN6RJcfEyN9idqqZk+ggVNb3fEJV/D8Bg+K7vQKfbeWkhlGNj3th3KYFYd2f8486Y86x4R6vA+PZhvpb7DNjkk6Lrd6M9mk5d0OGap8Z5K67QnNkPHaWSkRN4ysy/ZiTKUrL4f2iv1lDpQsuy4yaprFc0uvi/g8I1M/8eX+p2b63jGw+G0Dby7wJSPAzJrcACQdVlJrA9lxzZ+g+L6s3qJkfo2JgFW+HI70o2aaHxkHfYjt1WJXFcCWRl/qZpi5P8lC09kgrtp49sinXeL0NSsztvozcIHmL1z7vyR/42bR2arGNVgW8cde3ImLf3LknUNmfq0zcnqRicG3zBQYzMTHBDLkswBefNaG61oViVY15dNlFE57JI67qBhTZsAlwY+EdCsfzNLzfKSZ0LhAXhy9RcwsNLPnfiAfDjkY0TJhm7CMNuG1OlvsseFyI8ueb9q7hXuwnIVht8T4of0p61Kw446ao3mlQj7/sJhcdVX0G6wD5yAxKG2tWJVdJW4rZEwct1sseaZBDG/rFjGWneJp22wqrg6jWeOJX5YUn1InVvWkihs2ri0s6xUxBhe+O+3OqlpHfnqpVVQNuSQmN14S/lHtIx/NnThyU2SjGHkgVcyqqhLZTzSNKFgVI56yHBEeQbMjNhR/LQ7e6RFWx26R8aQrc0v6cCzBHs3en4Uuv0oceuREw1QnTn3YKILaH4qHfe1RpDSImjAHfAPOCHnoLpFb81C0N3gw2bZit8eJzU2dIir6vuitbxR2LX0oKK8QSZofxZLgOpEzqlHsld8UJ6fKeD3XhfUfybhpdEY3UEPcUimVtjkwq7JF6Ke1izeSHwj3k86khLrR1OCCtE7K3wd4skqn5K7Kgf3Jv/+nTC+UinC5lJVXlP+3T9yvxopnd+v/cmbD7N/vKFCNFHtdHTkaP0BIjm+LMCVtEgEdN//sI2VeiSgiVvS06TnXGRzhkPrZn74LA2tH/qYzlw0REWllEelFhWLd7ZXivw1CkBc=TimeStep0.000000NIFTI_XFORM_UNKNOWNNIFTI_XFORM_UNKNOWN 1.000000 0.000000 0.000000 0.000000 + 0.000000 1.000000 0.000000 0.000000 + 0.000000 0.000000 1.000000 0.000000 + 0.000000 0.000000 0.000000 
1.000000eJx9lnl0jecWh+MkJznJOTnn5MxzTgZB4lapoppKZD+GkqC0pURvb5egtCXcKkprdkurglSklBgqNYRbU5UWbYypmEUFISSUJhoqCJL71V3tWv3j3v3PXt9+v/W9e/3273veN65vCDUbI/lOnPhL6qSwKIIjZbE4x7gIUKLbET8ppgbRzLkvc/cclbun1ITNs3EjwMydtSpq0jxMyraQk+NhxxsWOl10ElMVwtjTXsLSXJxb42FvTBhZWTby07x0665CNcaJMT6KufvDKREHRd+GPN5rQm4kpz6fLs0mHBL/u9Mk2vCr6FbrHq9dXBfOhwkWhvXyUR3lYs9eO/PaWEkqCcMyWo8u7K54m7k49nU4KxcZSFtk5/wEI5dORvBBoJNOt0Ipm2ek6ICDSdV+TvfVcbBJpSzuGsf6LBfD0p0kPDCz7ryBZqPt3KqxYl7iRb/SxrGJTuqvWbiu9BBdHsQn3S20+VJL8igXXUZo6JjaINEbtJzabaTyZz3hT3qJGhRA2sxAaiY6GLoimiXt/FQHuWkR52RZkod+W53IuiA2Vyk6pzrIvhLBLwuCuav07RxlZv5RHXHbYllr9DL02Vi2qzx0ahZGsNZKr2Qfhxsr9blh9HzJRt/JWs6krpWBu9284I+l5+F4bhviSTL6aLbSx+Q8+2MN/wi6uFLCHh2RJoGxeHpbSO5jZUx5U+JOueiu2iB9UuplYpGG5UU3pcmPm2Xrj2dEqysVz7g6OdPpljyVkyALDuySRoPSpe3DjcKwCnlzrYaS3CMyvjqQn67fl4c39Gi76rl1N5BZy7Vsr/pc/tj/7IZXpHN43uPnmhvelN/zgRgT302x0n5gKFP23xTrngZpfs3G8/0jmdfOxvvxNlI/9aI956GN10Hh62aO5eqp6G8i4Qcr41bZCJjrolzx7+zVEdiG68k+6uDBDCtHW4azpjiU96ZYKN0SSLdSNc9Y7kiD+o5k5xtZlOtgyxY77d4IY+QMM5dnhzB8ZQD6SgsdAtQ8nO1gqi4KTV4M1dvDeXaohmNTvdQuNDF9oZ0DIzx0veXGudrEcJWL1760MjpeyZluFh63MxkHL564I3vNGkYOMLOsaSiJXYMZMchMwg0zX0WZqZxvZ/MyHXc/cNPjiSjU59xs/4cDe76DL4pMZHUP4Jd3q6XccEpqFuk42aCj7zdqisOiaBltY12/SPY/GU/CpDCWvOhgeL2bLnYHnXtE0qbGTas9djq8auezAW7q+jo5c8RA1eUIfEYbg8ZZUb+l53Z3G2uP2NHt9KFVvJC504V/jvJv3zaz2xXNskdO8pR+Dt3zKV5RavlWlmy1EXjcw61MC7ezdLzxwMTnvc0U5JvJUGY2vd5IcLGBy2WhnByiY3u6gd67LByNNyH77AzLVlPaWcs7S8P5UW8i5oGevEALMbgo+iqC3B16kpw24reaeLXAxZIDKioP6Njzso+KYAcviJ6qTA1xXhVL79nYnmri7SQd9hbVMnWRkaG1inb2cMWHYSxPCCUzyEDhaSeqC6FsyjBjHOLgYpaHxCVGCnLs2A56aN7JQqv3TIzq6WL2tlo5/n6d1H8TzDtttLR4W4PplBFn7ygiir1MW+zjozlRvKKwIu9uY3KwsrLYT7vkOAYeiOVjLPjH22jdx4b2rItUhY+XPvNxVGFo3LcORqqdrKp0MHqNjU4ZEcQo2g60eyk2RzLN7CR5vo2NM/S0+9ZC1ylG9u6tl/l1oWx81ckrKi899iiMKnewzeIkapuTE4q2V/Z7mNzTwr3ZZrrdDqZ0wEOZPSCMwbu0eJI1xEwKp2uOmw5JDkq+cNA3xky/m8HYA/WUpWsZXaxj8h0dRRoDx3d6eHmfk/ndXDy3zE3AC1786X4iJJrdu2wsqLbhrvXy3TUnK3QO0ircaG+5WN0jmrBZUaQW28kqVbykfLdZuZm4Z3RMW9ggx7Y5mHzeTfFIP+VnY0gti8GgjuGTBC9ZyZH8OsrNh5EeIn6z0/onI/WdHawYY2LX9BCat1RxtUMEJ/fpWbzIw4y/G7jUXoNuSb0Mf07hzZpyOXm7SmY1XJKJcV7WPRXF0CAfP+DBO1Zh5Cw/ieuVud2LZXl6YwrPNibTGsNzpU2oCjGT3d7HO+5oer4bzUcJUbyZ5yf3hoOHWY25f9rPVWXWbSMiyR3ThGfXN6VriwfyYWJjLuTEcD3A8RfW/q9Qr/hUrirc2pRoTPmjtvDj038ysmkfH6OPu2mwKNpuiGTO6z6e+TWOTT/HcrbKTKtyKzPesrG8j4l+unop2GXltUFuVs2MpmBwDOOnu0hv7SOteRzql7Kl0YVIlg2LY+/df6f8sCBHOn7kIc2cJ4Nzw2h1XYXh5EPRmk2oDldI7rwE9FstiDLbFmsLJb9hkxSeKJWmrZ3MuNlERlT8S4ZdOybBmyN4esgq2d34shzsXy8Xz+dJ1QQftWdasXS9ntQvDPRXq+m2f6rsealWOic1yN9WBVM2S8sTW9Tktr0tX1eelrqMEtnx/J2OM3e82HHX1quyZtVUGT/4uFy7fiO5S3o/qRy0RTLyh6U83fagFNbVSzAPpXehht9CgliZ0QjD1N+koP6EPHpejVWjZufSq3Jo3AMpG9uIioorciRThWffPkkZuFKGz74vOYd0nH9CYVWvIRIxtk48PWpkbuZ1eT8piJKAw1ISdEVy8s9Ll++viifviiR2jmBzTQiPOhu5qnCgZYiDy0l6Eq0aEv55U45bamXxV7VS3CSYQRmhrF6ooeWNcJJTdPSaZSEqREWS77/3qUnnCqVPgZYByrn7//yxPHygaDu4/vLOJOWM+T0HfdNR8joEcXpnCyluuygluixbDFlVf/rINed7uTczXVQZHtyvN0uZmVXw51p3udDx9/xZYpJsvLQvpenHW6R96WT5D2vigws= \ No newline at end of file From 641feaa14ca5c614c8eea339fcc687027c2ae2cc Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 12 Sep 2019 10:06:18 -0400 Subject: [PATCH 260/689] TEST: Test new test_data function --- nibabel/tests/test_testing.py | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 40d5ebc41e..bd6762b8de 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -3,15 +3,15 @@ from __future__ import division, print_function, absolute_import import sys +import os import warnings import numpy as np -from nose.tools import assert_equal -from nose.tools import assert_raises +from nose.tools import assert_equal, assert_true, assert_false, assert_raises from ..testing import (error_warnings, suppress_warnings, clear_and_catch_warnings, assert_allclose_safely, - get_fresh_mod, assert_re_in) + get_fresh_mod, assert_re_in, test_data, data_path) def assert_warn_len_equal(mod, n_in_context): @@ -163,3 +163,26 @@ def test_assert_re_in(): # Shouldn't "match" the empty list assert_raises(AssertionError, assert_re_in, "", []) + + +def test_test_data(): + assert_equal(test_data(), data_path) + assert_equal(test_data(), + os.path.abspath(os.path.join(os.path.dirname(__file__), + '..', 'tests', 'data'))) + for subdir in ('nicom', 'gifti', 'externals'): + assert_equal(test_data(subdir), data_path[:-10] + '%s/tests/data' % subdir) + assert_true(os.path.exists(test_data(subdir))) + assert_false(os.path.exists(test_data(subdir, 'doesnotexist'))) + + for subdir in ('freesurfer', 'doesnotexist'): + with assert_raises(ValueError): + test_data(subdir) + + assert_false(os.path.exists(test_data(None, 'doesnotexist'))) + + for subdir, fname in [('gifti', 'ascii.gii'), + ('nicom', '0.dcm'), + ('externals', 'example_1.nc'), + (None, 'empty.tck')]: + assert_true(os.path.exists(test_data(subdir, fname))) From b021b2dffa5b65b013f94c103c5ff5f671830614 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 10 Sep 2019 21:55:38 -0400 Subject: [PATCH 261/689] TEST: Check for failing GIFTI encodings --- nibabel/gifti/tests/test_gifti.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index f1285d441d..60f7a539ec 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -21,6 +21,7 @@ from nibabel.testing import clear_and_catch_warnings from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6) +import itertools def test_gifti_image(): @@ -400,3 +401,20 @@ def test_data_array_round_trip(): gio = GiftiImage.from_file_map(fmap) vertices = gio.darrays[0].data assert_array_equal(vertices, verts) + + +def test_type_coercion(): + dtypes = (np.uint8, np.int32, np.int64, np.float32, np.float64) + encodings = ('ASCII', 'B64BIN', 'B64GZ') + for data_dtype, darray_dtype, encoding in itertools.product(dtypes, + dtypes, + encodings): + da = GiftiDataArray(np.arange(10).astype(data_dtype), + encoding=encoding, + intent='NIFTI_INTENT_NODE_INDEX', + datatype=darray_dtype) + gii = GiftiImage(darrays=[da]) + gii_copy = GiftiImage.from_bytes(gii.to_bytes()) + da_copy = gii_copy.darrays[0] + assert_equal(np.dtype(da_copy.data.dtype), np.dtype(darray_dtype)) + assert_array_equal(da_copy.data, da.data) From 2838c49ddf84981fa1b2a43646ac423340c03861 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 10 Sep 2019 22:47:01 -0400 Subject: [PATCH 262/689] FIX: Coerce data types on writing GIFTI DataArrays Includes a hack to keep the old data_tag API functional --- nibabel/gifti/gifti.py | 16 ++++++++++------ nibabel/gifti/tests/test_gifti.py | 2 +- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 22d6449e9a..5a69644f8e 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -270,16 +270,21 @@ def _to_xml_element(self): return DataTag(dataarray, encoding, datatype, ordering).to_xml() -def _data_tag_element(dataarray, encoding, datatype, ordering): +def _data_tag_element(dataarray, encoding, dtype, ordering): """ Creates data tag with given `encoding`, returns as XML element """ import zlib - ord = array_index_order_codes.npcode[ordering] + order = array_index_order_codes.npcode[ordering] enclabel = gifti_encoding_codes.label[encoding] if enclabel == 'ASCII': - da = _arr2txt(dataarray, datatype) + # XXX Accommodating data_tag API + # On removal (nibabel 4.0) drop str case + da = _arr2txt(dataarray, dtype if isinstance(dtype, str) else KIND2FMT[dtype.kind]) elif enclabel in ('B64BIN', 'B64GZ'): - out = dataarray.tostring(ord) + # XXX Accommodating data_tag API - don't try to fix dtype + if isinstance(dtype, str): + dtype = dataarray.dtype + out = np.asanyarray(dataarray, dtype).tostring(order) if enclabel == 'B64GZ': out = zlib.compress(out) da = base64.b64encode(out).decode() @@ -462,11 +467,10 @@ def _to_xml_element(self): if self.coordsys is not None: data_array.append(self.coordsys._to_xml_element()) # write data array depending on the encoding - dt_kind = data_type_codes.dtype[self.datatype].kind data_array.append( _data_tag_element(self.data, gifti_encoding_codes.specs[self.encoding], - KIND2FMT[dt_kind], + data_type_codes.dtype[self.datatype], self.ind_ord)) return data_array diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 60f7a539ec..8ef7846f51 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -403,7 +403,7 @@ def test_data_array_round_trip(): assert_array_equal(vertices, verts) -def test_type_coercion(): +def test_darray_dtype_coercion_failures(): dtypes = (np.uint8, np.int32, np.int64, np.float32, np.float64) encodings = ('ASCII', 'B64BIN', 'B64GZ') for data_dtype, darray_dtype, encoding in itertools.product(dtypes, From b46efa35fe6170168db6bd5b1f6607a07389e69c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 13 Sep 2019 13:14:38 -0400 Subject: [PATCH 263/689] MAINT: Check nightly builds on 3.7 --- .travis.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4c1e535651..af42de237a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,9 +8,7 @@ dist: xenial sudo: true language: python -cache: - directories: - - $HOME/.cache/pip +cache: pip env: global: - SETUP_REQUIRES="pip setuptools>=30.3.0 wheel" @@ -54,8 +52,8 @@ matrix: - python: 3.5 env: - DEPENDS="numpy git+https://github.com/pydicom/pydicom.git@master" - # test 3.5 against pre-release builds of everything - - python: 3.5 + # test 3.7 against pre-release builds of everything + - python: 3.7 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" - python: 3.5 From 5b4d03afef8cbfa4e90b451a1074fc41081c0c1b Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 13 Sep 2019 13:50:07 -0400 Subject: [PATCH 264/689] MAINT: Check nightly builds on 3.7 --- .travis.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index e6111e8790..9a98028c2b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,9 +8,7 @@ dist: xenial sudo: true language: python -cache: - directories: - - $HOME/.cache/pip +cache: pip env: global: - DEPENDS="six numpy scipy matplotlib h5py pillow pydicom" @@ -59,8 +57,8 @@ matrix: - python: 2.7 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" - # test 3.5 against pre-release builds of everything - - python: 3.5 + # test 3.7 against pre-release builds of everything + - python: 3.7 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" - python: 2.7 From 20df4c83cc2907b9acff44f7aba01fcf1350bc4b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 15 Sep 2019 13:22:57 -0400 Subject: [PATCH 265/689] TEST: Reproduce gh-802 --- nibabel/tests/test_image_api.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 8792fe938e..7e445d55fc 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -150,8 +150,11 @@ def validate_filenames(self, imaker, params): # to_ / from_ filename fname = 'another_image' + self.standard_extension with InTemporaryDirectory(): - img.to_filename(fname) - rt_img = img.__class__.from_filename(fname) + # Validate that saving or loading a file doesn't use deprecated methods internally + with clear_and_catch_warnings() as w: + warnings.simplefilter('error', DeprecationWarning) + img.to_filename(fname) + rt_img = img.__class__.from_filename(fname) assert_array_equal(img.shape, rt_img.shape) assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) # get_data will be deprecated From c8d93e6067ec793c294d46fb1d4aee805b188a59 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 15 Sep 2019 13:23:14 -0400 Subject: [PATCH 266/689] ENH: Remove img.get_data() from internal use except for appropriate tests --- nibabel/__init__.py | 2 +- nibabel/analyze.py | 2 +- nibabel/brikhead.py | 2 +- nibabel/cifti2/tests/test_cifti2io_axes.py | 14 ++++---- nibabel/cifti2/tests/test_cifti2io_header.py | 4 +-- nibabel/cifti2/tests/test_new_cifti2.py | 27 +++++++------- nibabel/cmdline/ls.py | 2 +- nibabel/dataobj_images.py | 4 +-- nibabel/ecat.py | 4 +-- nibabel/filebasedimages.py | 2 +- nibabel/freesurfer/mghformat.py | 2 +- nibabel/freesurfer/tests/test_mghformat.py | 8 ++--- nibabel/funcs.py | 7 ++-- nibabel/loadsave.py | 4 +-- nibabel/minc2.py | 2 +- nibabel/processing.py | 2 +- nibabel/spaces.py | 2 +- nibabel/spatialimages.py | 13 +++---- nibabel/tests/data/check_parrec_reslice.py | 6 ++-- nibabel/tests/test_analyze.py | 13 +++---- nibabel/tests/test_brikhead.py | 8 ++--- nibabel/tests/test_ecat.py | 16 ++++----- nibabel/tests/test_ecat_data.py | 2 +- nibabel/tests/test_filebasedimages.py | 6 ++++ nibabel/tests/test_files_interface.py | 8 ++--- nibabel/tests/test_funcs.py | 10 +++--- nibabel/tests/test_image_api.py | 31 ++++++++-------- nibabel/tests/test_image_load_save.py | 38 ++++++++++---------- nibabel/tests/test_loadsave.py | 6 ++-- nibabel/tests/test_minc1.py | 10 +++--- nibabel/tests/test_minc2_data.py | 4 +-- nibabel/tests/test_nifti1.py | 30 ++++++++-------- nibabel/tests/test_parrec.py | 4 +-- nibabel/tests/test_processing.py | 2 +- nibabel/tests/test_proxy_api.py | 4 +-- nibabel/tests/test_round_trip.py | 2 +- nibabel/tests/test_scripts.py | 20 +++++------ nibabel/tests/test_spatialimages.py | 37 +++++++++++-------- nibabel/tests/test_spm99analyze.py | 32 ++++++++--------- 39 files changed, 207 insertions(+), 185 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 2e4f877c5f..3e57643fc1 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -23,7 +23,7 @@ img2 = nib.load('other_file.nii.gz') img3 = nib.load('spm_file.img') - data = img1.get_data() + data = img1.get_fdata() affine = img1.affine print(img1) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 8015715590..dc352505c6 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -1009,7 +1009,7 @@ def to_file_map(self, file_map=None): ''' if file_map is None: file_map = self.file_map - data = self.get_data() + data = np.asanyarray(self.dataobj) self.update_header() hdr = self._header out_dtype = self.get_data_dtype() diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 49182ba705..c5847a87a8 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -486,7 +486,7 @@ class AFNIImage(SpatialImage): [ 0. , 0. , 3. , -52.3511], [ 0. , 0. , 0. , 1. 
]]) >>> head = load(os.path.join(datadir, 'example4d+orig.HEAD')) - >>> np.array_equal(head.get_data(), brik.get_data()) + >>> np.array_equal(head.get_fdata(), brik.get_fdata()) True """ diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index 4089395b78..c237e3c61a 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -93,8 +93,8 @@ def check_rewrite(arr, axes, extension='.nii'): (fd, name) = tempfile.mkstemp(extension) cifti2.Cifti2Image(arr, header=axes).to_filename(name) img = nib.load(name) - arr2 = img.get_data() - assert (arr == arr2).all() + arr2 = img.get_fdata() + assert np.allclose(arr, arr2) for idx in range(len(img.shape)): assert (axes[idx] == img.header.get_axis(idx)) return img @@ -103,7 +103,7 @@ def check_rewrite(arr, axes, extension='.nii'): @needs_nibabel_data('nitest-cifti2') def test_read_ones(): img = nib.load(os.path.join(test_directory, 'ones.dscalar.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert (arr == 1).all() assert isinstance(axes[0], cifti2_axes.ScalarAxis) @@ -118,7 +118,7 @@ def test_read_ones(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dscalar(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dscalar.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.ScalarAxis) assert len(axes[0]) == 2 @@ -132,7 +132,7 @@ def test_read_conte69_dscalar(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dtseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dtseries.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 @@ -147,7 +147,7 @@ def test_read_conte69_dtseries(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dlabel(): img = nib.load(os.path.join(test_directory, 'Conte69.parcellations_VGD11b.32k_fs_LR.dlabel.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.LabelAxis) assert len(axes[0]) == 3 @@ -162,7 +162,7 @@ def test_read_conte69_dlabel(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_ptseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.ptseries.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 3e3cd9c77d..b8cbd05a32 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -63,11 +63,11 @@ def test_read_and_proxies(): assert_equal(img2.shape, (1, 91282)) # While we cannot reshape arrayproxies, all images are in-memory assert_true(not img2.in_memory) - data = img2.get_data() + data = img2.get_fdata() assert_true(data is not img2.dataobj) # Uncaching has no effect, images are always array images img2.uncache() - assert_true(data is not img2.get_data()) + assert_true(data is not img2.get_fdata()) @needs_nibabel_data('nitest-cifti2') diff --git a/nibabel/cifti2/tests/test_new_cifti2.py 
b/nibabel/cifti2/tests/test_new_cifti2.py index 2a157ca7fb..15c64e84c3 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -13,7 +13,8 @@ from nibabel.tmpdirs import InTemporaryDirectory from nose.tools import assert_true, assert_equal, assert_raises -from nibabel.testing import clear_and_catch_warnings, error_warnings, suppress_warnings +from nibabel.testing import ( + clear_and_catch_warnings, error_warnings, suppress_warnings, assert_array_equal) affine = [[-1.5, 0, 0, 90], [0, 1.5, 0, -85], @@ -246,7 +247,7 @@ def test_dtseries(): assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseSeries') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_series_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -268,7 +269,7 @@ def test_dscalar(): img2 = nib.load('test.dscalar.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseScalar') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_scalar_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -290,7 +291,7 @@ def test_dlabel(): img2 = nib.load('test.dlabel.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseLabel') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_label_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -310,7 +311,7 @@ def test_dconn(): img2 = nib.load('test.dconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDense') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_geometry_map(img2.header.matrix.get_index_map(0)) @@ -333,7 +334,7 @@ def test_ptseries(): img2 = nib.load('test.ptseries.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelSries') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_series_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -355,7 +356,7 @@ def test_pscalar(): img2 = nib.load('test.pscalar.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelScalr') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_scalar_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -377,7 +378,7 @@ def test_pdconn(): img2 = ci.load('test.pdconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelDense') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_geometry_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -399,7 +400,7 @@ def test_dpconn(): img2 = ci.load('test.dpconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseParcel') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + 
assert_array_equal(img2.get_fdata(), data) check_parcel_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -420,7 +421,7 @@ def test_plabel(): img2 = ci.load('test.plabel.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnUnknown') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_label_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -440,7 +441,7 @@ def test_pconn(): img2 = ci.load('test.pconn.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnParcels') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_parcel_map(img2.header.matrix.get_index_map(0)) @@ -465,7 +466,7 @@ def test_pconnseries(): img2 = ci.load('test.pconnseries.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnPPSr') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_parcel_map(img2.header.matrix.get_index_map(0)) @@ -491,7 +492,7 @@ def test_pconnscalar(): img2 = ci.load('test.pconnscalar.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnPPSc') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) diff --git a/nibabel/cmdline/ls.py b/nibabel/cmdline/ls.py index 68bd6ee8c0..ea2e4032ae 100755 --- a/nibabel/cmdline/ls.py +++ b/nibabel/cmdline/ls.py @@ -125,7 +125,7 @@ def proc_file(f, opts): if opts.stats or opts.counts: # We are doomed to load data try: - d = vol.get_data() + d = np.asarray(vol.dataobj) if not opts.stats_zeros: d = d[np.nonzero(d)] else: diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 9ba97789dc..3c0558f43e 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -68,8 +68,8 @@ def get_data(self, caching='fill'): We recommend you use the ``get_fdata`` method instead of the ``get_data`` method, because it is easier to predict the return - data type. We will deprecate the ``get_data`` method around April - 2018, and remove it around April 2020. + data type. ``get_data`` will be deprecated around November 2019 + and removed around November 2021. If you don't care about the predictability of the return data type, and you want the minimum possible data size in memory, you can diff --git a/nibabel/ecat.py b/nibabel/ecat.py index fef2741ef8..f3a7f1736c 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -788,7 +788,7 @@ def __init__(self, dataobj, affine, header, >>> frame0 = img.get_frame(0) >>> frame0.shape == (10, 10, 3) True - >>> data4d = img.get_data() + >>> data4d = img.get_fdata() >>> data4d.shape == (10, 10, 3, 1) True """ @@ -945,7 +945,7 @@ def to_file_map(self, file_map=None): # It appears to be necessary to load the data before saving even if the # data itself is not used. 
- self.get_data() + self.get_fdata() hdr = self.header mlist = self._mlist subheaders = self.get_subheaders() diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 64b79550e3..86ce837942 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -120,7 +120,7 @@ class FileBasedImage(object): You can get the data out again with:: - img.get_data() + img.get_fdata() Less commonly, for some image types that support it, you might want to fetch out the unscaled array via the object containing the data:: diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 37bc82cfb3..ddb30cb796 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -594,7 +594,7 @@ def to_file_map(self, file_map=None): ''' if file_map is None: file_map = self.file_map - data = self.get_data() + data = np.asanyarray(self.dataobj) self.update_header() hdr = self.header with file_map['image'].get_prepare_fileobj('wb') as mghf: diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 47e54080c3..289acbcd01 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -81,7 +81,7 @@ def test_read_mgh(): assert_array_almost_equal(h.get_vox2ras_tkr(), v2rtkr) # data. will be different for your own mri_volsynth invocation - v = mgz.get_data() + v = mgz.get_fdata() assert_almost_equal(v[1, 2, 3, 0], -0.3047, 4) assert_almost_equal(v[1, 2, 3, 1], 0.0018, 4) @@ -97,7 +97,7 @@ def test_write_mgh(): # read from the tmp file and see if it checks out mgz = load('tmpsave.mgz') h = mgz.header - dat = mgz.get_data() + dat = mgz.get_fdata() # Delete loaded image to allow file deletion by windows del mgz # header @@ -193,7 +193,7 @@ def test_filename_exts(): save(img, fname) # read from the tmp file and see if it checks out img_back = load(fname) - assert_array_equal(img_back.get_data(), v) + assert_array_equal(img_back.get_fdata(), v) del img_back @@ -288,7 +288,7 @@ def test_mgh_load_fileobj(): fm = MGHImage.make_file_map(mapping=dict(image=bio)) img2 = MGHImage.from_file_map(fm) assert_true(img2.dataobj.file_like is bio) - assert_array_equal(img.get_data(), img2.get_data()) + assert_array_equal(img.get_fdata(), img2.get_fdata()) def test_mgh_affine_default(): diff --git a/nibabel/funcs.py b/nibabel/funcs.py index 240b20f802..178ac8191c 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -79,8 +79,7 @@ def squeeze_image(img): if slen == len(shape): return klass.from_image(img) shape = shape[:slen] - data = img.get_data() - data = data.reshape(shape) + data = np.asanyarray(img.dataobj).reshape(shape) return klass(data, img.affine, img.header, @@ -144,7 +143,7 @@ def concat_images(images, check_affines=True, axis=None): raise ValueError('Affine for image {0} does not match affine ' 'for first image'.format(i)) # Do not fill cache in image if it is empty - out_data[i] = img.get_data(caching='unchanged') + out_data[i] = np.asanyarray(img.dataobj) if axis is None: out_data = np.rollaxis(out_data, 0, out_data.ndim) @@ -169,7 +168,7 @@ def four_to_three(img): imgs : list list of 3D images ''' - arr = img.get_data() + arr = np.asanyarray(img.dataobj) header = img.header affine = img.affine image_maker = img.__class__ diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index cd1efbe3d7..421b95ba2f 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -155,7 +155,7 @@ def read_img_data(img, prefer='scaled'): """ Read data from image associated with 
files If you want unscaled data, please use ``img.dataobj.get_unscaled()`` - instead. If you want scaled data, use ``img.get_data()`` (which will cache + instead. If you want scaled data, use ``img.get_fdata()`` (which will cache the loaded array) or ``np.array(img.dataobj)`` (which won't cache the array). If you want to load the data as for a modified header, save the image with the modified header, and reload. @@ -164,7 +164,7 @@ ---------- img : ``SpatialImage`` Image with valid image file in ``img.file_map``. Unlike the - ``img.get_data()`` method, this function returns the data read + ``img.get_fdata()`` method, this function returns the data read from the image file, as specified by the *current* image header and *current* image files. prefer : str, optional diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 37821409c4..b27d43f77f 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -16,7 +16,7 @@ import nibabel as nib img = nib.load('my_funny.mnc') - data = img.get_data() + data = img.get_fdata() print(data.mean()) print(data.max()) print(data.min()) diff --git a/nibabel/processing.py b/nibabel/processing.py index 449e6b41fc..0c5f921d87 100644 --- a/nibabel/processing.py +++ b/nibabel/processing.py @@ -233,7 +233,7 @@ def resample_to_output(in_img, # looks like when resampled into world coordinates if n_dim < 3: # Expand image to 3D, make voxel sizes match new_shape = in_shape + (1,) * (3 - n_dim) - data = in_img.get_data().reshape(new_shape) # 2D data should be small + data = np.asanyarray(in_img.dataobj).reshape(new_shape) # 2D data should be small in_img = out_class(data, in_img.affine, in_img.header) if voxel_sizes is not None and len(voxel_sizes) == n_dim: # Need to pad out voxel sizes to match new image dimensions diff --git a/nibabel/spaces.py b/nibabel/spaces.py index 393a8a216f..094f43dc77 100644 --- a/nibabel/spaces.py +++ b/nibabel/spaces.py @@ -112,7 +112,7 @@ def slice2volume(index, axis, shape=None): and then use ``whole_aff`` in ``scipy.ndimage.affine_transform``: rzs, trans = to_matvec(whole_aff) - data = img2.get_data() + data = img2.get_fdata() new_slice = scipy.ndimage.affine_transform(data, rzs, trans, slice_shape) Parameters diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index ede0820065..fd2795e96a 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -20,7 +20,8 @@ methods: - * .get_data() + * .get_fdata() + * .get_data() (deprecated, use get_fdata() instead) * .get_affine() (deprecated, use affine property instead) * .get_header() (deprecated, use header property instead) * .to_filename(fname) - writes data to filename(s) derived from @@ -69,7 +70,7 @@ You can get the data out again with:: - img.get_data() + img.get_fdata() Less commonly, for some image types that support it, you might want to fetch out the unscaled array via the object containing the data:: @@ -123,12 +124,12 @@ >>> img.to_file_map() >>> # read it back again from the written files >>> img2 = nib.AnalyzeImage.from_file_map(file_map) - >>> np.all(img2.get_data() == data) + >>> np.all(img2.get_fdata(dtype=np.float32) == data) True >>> # write, read it again >>> img2.to_file_map() >>> img3 = nib.AnalyzeImage.from_file_map(file_map) - >>> np.all(img3.get_data() == data) + >>> np.all(img3.get_fdata(dtype=np.float32) == data) True ''' @@ -586,7 +587,7 @@ def __getitem__(self, idx): "Cannot slice image objects; consider using `img.slicer[slice]` " "to generate a sliced image (see documentation for caveats) or "
"slicing image array data with `img.dataobj[slice]` or " - "`img.get_data()[slice]`") + "`img.get_fdata()[slice]`") def orthoview(self): """Plot the image using OrthoSlicer3D @@ -630,7 +631,7 @@ def as_reoriented(self, ornt): if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]): return self - t_arr = apply_orientation(self.get_data(), ornt) + t_arr = apply_orientation(np.asanyarray(self.dataobj), ornt) new_aff = self.affine.dot(inv_ornt_aff(ornt, self.shape)) return self.__class__(t_arr, new_aff, self.header) diff --git a/nibabel/tests/data/check_parrec_reslice.py b/nibabel/tests/data/check_parrec_reslice.py index cc2a5942b5..c7352c3f89 100644 --- a/nibabel/tests/data/check_parrec_reslice.py +++ b/nibabel/tests/data/check_parrec_reslice.py @@ -39,7 +39,7 @@ def resample_img2img(img_to, img_from, order=1, out_class=nib.Nifti1Image): from scipy import ndimage as spnd vox2vox = npl.inv(img_from.affine).dot(img_to.affine) rzs, trans = to_matvec(vox2vox) - data = spnd.affine_transform(img_from.get_data(), + data = spnd.affine_transform(img_from.get_fdata(), rzs, trans, img_to.shape, @@ -57,7 +57,7 @@ def gmean_norm(data): np.set_printoptions(suppress=True, precision=4) normal_fname = "Phantom_EPI_3mm_tra_SENSE_6_1.PAR" normal_img = parrec.load(normal_fname) - normal_data = normal_img.get_data() + normal_data = normal_img.get_fdata() normal_normed = gmean_norm(normal_data) print("RMS of standard image {:<44}: {}".format( @@ -69,7 +69,7 @@ def gmean_norm(data): continue funny_img = parrec.load(parfile) fixed_img = resample_img2img(normal_img, funny_img) - fixed_data = fixed_img.get_data() + fixed_data = fixed_img.get_fdata() difference_data = normal_normed - gmean_norm(fixed_data) print('RMS resliced {:<52} : {}'.format( parfile, diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 45a4c00d62..6b05df83e3 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -717,8 +717,8 @@ def test_default_header(self): def test_data_hdr_cache(self): # test the API for loaded images, such that the data returned - # from img.get_data() is not affected by subsequent changes to - # the header. + # from np.asanyarray(img.dataobj) and img.get_fdata() are not + # affected by subsequent changes to the header. IC = self.image_class # save an image to a file map fm = IC.make_file_map() @@ -739,7 +739,8 @@ def test_data_hdr_cache(self): assert_equal(hdr.get_data_shape(), (3, 2, 2)) hdr.set_data_dtype(np.uint8) assert_equal(hdr.get_data_dtype(), np.dtype(np.uint8)) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) + assert_array_equal(np.asanyarray(img2.dataobj), data) # now check read_img_data function - here we do see the changed # header sc_data = read_img_data(img2) @@ -830,7 +831,7 @@ def test_header_updating(self): hdr_back = img.from_file_map(img.file_map).header assert_array_equal(hdr.get_zooms(), (9, 3, 4)) # Modify data in-place?
Update on save - data = img.get_data() + data = img.get_fdata() data.shape = (3, 2, 4) img.to_file_map() img_back = img.from_file_map(img.file_map) @@ -843,7 +844,7 @@ def test_pickle(self): img = img_klass(np.zeros((2, 3, 4)), None) img_str = pickle.dumps(img) img2 = pickle.loads(img_str) - assert_array_equal(img.get_data(), img2.get_data()) + assert_array_equal(img.get_fdata(), img2.get_fdata()) assert_equal(img.header, img2.header) # Save / reload using bytes IO objects for key, value in img.file_map.items(): @@ -852,7 +853,7 @@ def test_pickle(self): img_prox = img.from_file_map(img.file_map) img_str = pickle.dumps(img_prox) img2_prox = pickle.loads(img_str) - assert_array_equal(img.get_data(), img2_prox.get_data()) + assert_array_equal(img.get_fdata(), img2_prox.get_fdata()) def test_no_finite_values(self): # save of data with no finite values to int type raises error if we have diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index a99e6c41b6..d09023d248 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -99,7 +99,7 @@ def test_brikheadfile(self): assert_equal(brik.header.get_zooms(), tp['zooms']) assert_array_equal(brik.affine, tp['affine']) assert_equal(brik.header.get_space(), tp['space']) - data = brik.get_data() + data = brik.get_fdata() assert_equal(data.shape, tp['shape']) assert_array_equal(brik.dataobj.scaling, tp['scaling']) assert_equal(brik.header.get_volume_labels(), tp['labels']) @@ -108,20 +108,20 @@ def test_load(self): # Check highest level load of brikhead works for tp in self.test_files: img = self.module.load(tp['head']) - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, tp['shape']) # min, max, mean values assert_data_similar(data, tp) # check if file can be converted to nifti ni_img = Nifti1Image.from_image(img) assert_array_equal(ni_img.affine, tp['affine']) - assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) def test_array_proxy_slicing(self): # Test slicing of array proxy for tp in self.test_files: img = self.module.load(tp['fname']) - arr = img.get_data() + arr = img.get_fdata() prox = img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(img.shape): diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 9005d32d4f..a3a40b2904 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -193,20 +193,20 @@ def test_save(self): with InTemporaryDirectory(): self.img.to_filename(tmp_file) other = self.image_class.load(tmp_file) - assert_equal(self.img.get_data().all(), other.get_data().all()) + assert_array_equal(self.img.get_fdata(), other.get_fdata()) # Delete object holding reference to temporary file to make Windows # happier. 
del other def test_data(self): - dat = self.img.get_data() + dat = self.img.get_fdata() assert_equal(dat.shape, self.img.shape) frame = self.img.get_frame(0) assert_array_equal(frame, dat[:, :, :, 0]) def test_array_proxy(self): # Get the cached data copy - dat = self.img.get_data() + dat = self.img.get_fdata() # Make a new one to test arrayproxy img = self.image_class.load(self.example_file) data_prox = img.dataobj @@ -218,7 +218,7 @@ def test_array_proxy(self): def test_array_proxy_slicing(self): # Test slicing of array proxy - arr = self.img.get_data() + arr = self.img.get_fdata() prox = self.img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(self.img.shape): @@ -227,7 +227,7 @@ def test_array_proxy_slicing(self): def test_isolation(self): # Test image isolated from external changes to affine img_klass = self.image_class - arr, aff, hdr, sub_hdr, mlist = (self.img.get_data(), + arr, aff, hdr, sub_hdr, mlist = (self.img.get_fdata(), self.img.affine, self.img.header, self.img.get_subheaders(), @@ -240,7 +240,7 @@ def test_isolation(self): def test_float_affine(self): # Check affines get converted to float img_klass = self.image_class - arr, aff, hdr, sub_hdr, mlist = (self.img.get_data(), + arr, aff, hdr, sub_hdr, mlist = (self.img.get_fdata(), self.img.affine, self.img.header, self.img.get_subheaders(), @@ -256,7 +256,7 @@ def test_data_regression(self): vals = dict(max=248750736458.0, min=1125342630.0, mean=117907565661.46666) - data = self.img.get_data() + data = self.img.get_fdata() assert_equal(data.max(), vals['max']) assert_equal(data.min(), vals['min']) assert_array_almost_equal(data.mean(), vals['mean']) @@ -277,4 +277,4 @@ def test_from_filespec_deprecation(): # Warning for from_filespec img_speced = EcatImage.from_filespec(ecat_file) assert_equal(len(w), 1) - assert_array_equal(img_loaded.get_data(), img_speced.get_data()) + assert_array_equal(img_loaded.get_fdata(), img_speced.get_fdata()) diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index 471bc6b93c..4b187bf855 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -43,7 +43,7 @@ def test_load(self): assert_equal(img.shape, self.example_params['shape']) assert_equal(img.get_data_dtype(0).type, self.example_params['type']) # Check correspondence of data and recorded shape - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, self.example_params['shape']) # min, max, mean values from given parameters assert_almost_equal(data.min(), self.example_params['min'], 4) diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index c9d256edbb..a9c5668508 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -2,6 +2,7 @@ """ from itertools import product +import warnings import numpy as np @@ -27,6 +28,11 @@ def shape(self): return self.arr.shape def get_data(self): + warnings.warn('Deprecated', DeprecationWarning) + return self.arr + + @property + def dataobj(self): return self.arr def get_fdata(self): diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py index 0e9ed88eb9..1994741a1a 100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -71,7 +71,7 @@ def test_files_interface(): img.to_file_map() # saves to files img2 = Nifti1Image.from_file_map(img.file_map) # img still has correct data - assert_array_equal(img2.get_data(), img.get_data()) + 
assert_array_equal(img2.get_fdata(), img.get_fdata()) # fileobjs - pair img = Nifti1Pair(arr, aff) img.file_map['image'].fileobj = BytesIO() @@ -81,7 +81,7 @@ def test_files_interface(): img.to_file_map() # saves to files img2 = Nifti1Pair.from_file_map(img.file_map) # img still has correct data - assert_array_equal(img2.get_data(), img.get_data()) + assert_array_equal(img2.get_fdata(), img.get_fdata()) def test_round_trip_spatialimages(): @@ -99,8 +99,8 @@ def test_round_trip_spatialimages(): img.to_file_map() # read it back again from the written files img2 = klass.from_file_map(file_map) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) # write, read it again img2.to_file_map() img3 = klass.from_file_map(file_map) - assert_array_equal(img3.get_data(), data) + assert_array_equal(img3.get_fdata(), data) diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 8a2a7918d8..447555d6d0 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -108,7 +108,7 @@ def test_concat(): else: assert_false( expect_error, "Expected a concatenation error, but got none.") - assert_array_equal(all_imgs.get_data(), all_data) + assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) # check that not-matching affines raise error @@ -123,7 +123,7 @@ def test_concat(): else: assert_false( expect_error, "Expected a concatenation error, but got none.") - assert_array_equal(all_imgs.get_data(), all_data) + assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) @@ -140,7 +140,7 @@ def test_closest_canonical(): img = AnalyzeImage(arr, np.diag([-1, 1, 1, 1])) xyz_img = as_closest_canonical(img) assert_false(img is xyz_img) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) # Now onto the NIFTI cases (where dim_info also has to be updated) @@ -159,7 +159,7 @@ def test_closest_canonical(): xyz_img = as_closest_canonical(img) assert_false(img is xyz_img) assert_true(img.header.get_dim_info() == xyz_img.header.get_dim_info()) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) # no error for enforce_diag in this case @@ -185,7 +185,7 @@ def test_closest_canonical(): # Check both the original and new objects assert_true(img.header.get_dim_info() == (0, 1, 2)) assert_true(xyz_img.header.get_dim_info() == (0, 2, 1)) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.transpose(arr, (0, 2, 1, 3))) # same axis swap but with None dim info (except for slice dim) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 7e445d55fc..b27665523d 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -14,10 +14,9 @@ * ``img.shape`` (shape of data as read with ``np.array(img.dataobj)`` * ``img.get_fdata()`` (returns floating point data as read with ``np.array(img.dataobj)`` and the cast to float); -* ``img.get_data()`` (returns data as read with ``np.array(img.dataobj)``); -* ``img.uncache()`` (``img.get_data()`` and ``img.get_data`` are allowed to - cache the result of the array creation. If they do, this call empties that - cache. Implement this as a no-op if ``get_fdata()``, ``get_data`` do not +* ``img.uncache()`` (``img.get_fdata()`` and ``img.get_data()`` (deprecated) are + allowed to cache the result of the array creation. 
If they do, this call empties + that cache. Implement this as a no-op if ``get_fdata()``, ``get_data`` do not cache. * ``img[something]`` generates an informative TypeError * ``img.in_memory`` is True for an array image, and for a proxy image that is @@ -44,7 +43,7 @@ from nose import SkipTest from nose.tools import (assert_true, assert_false, assert_raises, assert_equal) -from numpy.testing import (assert_almost_equal, assert_array_equal) +from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns from ..testing import clear_and_catch_warnings from ..tmpdirs import InTemporaryDirectory @@ -92,7 +91,7 @@ def obj_params(self): ``data_summary`` : dict with data ``min``, ``max``, ``mean``; * ``shape`` : shape of image; * ``affine`` : shape (4, 4) affine array for image; - * ``dtype`` : dtype of data returned from ``get_data()``; + * ``dtype`` : dtype of data returned from ``np.asarray(dataobj)``; * ``is_proxy`` : bool, True if image data is proxied; Notes @@ -131,8 +130,7 @@ def validate_filenames(self, imaker, params): rt_img = bytesio_round_trip(img) assert_array_equal(img.shape, rt_img.shape) assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) # Give the image a file map klass = type(img) rt_img.file_map = bytesio_filemap(klass) @@ -140,8 +138,7 @@ def validate_filenames(self, imaker, params): rt_img.to_file_map() rt_rt_img = klass.from_file_map(rt_img.file_map) assert_almost_equal(img.get_fdata(), rt_rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) # get_ / set_ filename fname = 'an_image' + self.standard_extension img.set_filename(fname) @@ -157,8 +154,7 @@ def validate_filenames(self, imaker, params): rt_img = img.__class__.from_filename(fname) assert_array_equal(img.shape, rt_img.shape) assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) del rt_img # to allow windows to delete the directory def validate_no_slicing(self, imaker, params): @@ -166,6 +162,13 @@ def validate_no_slicing(self, imaker, params): assert_raises(TypeError, img.__getitem__, 'string') assert_raises(TypeError, img.__getitem__, slice(None)) + def validate_get_data_deprecated(self, imaker, params): + # Check deprecated header API + img = imaker() + with assert_warns(DeprecationWarning): + data = img.get_data() + assert_array_equal(np.asanyarray(img.dataobj), data) + class GetSetDtypeMixin(object): """ Adds dtype tests @@ -520,7 +523,7 @@ def validate_from_bytes(self, imaker, params): img_b = klass.from_bytes(fobj.read()) assert self._header_eq(img_a.header, img_b.header) - assert np.array_equal(img_a.get_data(), img_b.get_data()) + assert np.array_equal(img_a.get_fdata(), img_b.get_fdata()) del img_a del img_b @@ -540,7 +543,7 @@ def validate_to_from_bytes(self, imaker, params): assert img_b.to_bytes() == bytes_a assert self._header_eq(img_a.header, img_b.header) - assert np.array_equal(img_a.get_data(), img_b.get_data()) + assert np.array_equal(img_a.get_fdata(), img_b.get_fdata()) del img_a del img_b diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 
7101b6a31b..6031d4e851 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -29,7 +29,7 @@ from ..spatialimages import SpatialImage from numpy.testing import assert_array_equal, assert_array_almost_equal -from nose.tools import assert_true, assert_equal, assert_raises +from nose.tools import assert_true, assert_equal, assert_not_equal, assert_raises _, have_scipy, _ = optional_package('scipy') # No scipy=>no SPM-format writing DATA_PATH = pjoin(dirname(__file__), 'data') @@ -38,11 +38,7 @@ def round_trip(img): # round trip a nifti single - sio = BytesIO() - img.file_map['image'].fileobj = sio - img.to_file_map() - img2 = Nifti1Image.from_file_map(img.file_map) - return img2 + return Nifti1Image.from_bytes(img.to_bytes()) def test_conversion_spatialimages(): @@ -61,7 +57,7 @@ def test_conversion_spatialimages(): if not w_class.makeable: continue img2 = w_class.from_image(img) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) assert_array_equal(img2.affine, affine) @@ -74,13 +70,15 @@ def test_save_load_endian(): assert_equal(img.header.endianness, native_code) img2 = round_trip(img) assert_equal(img2.header.endianness, native_code) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) + assert_array_equal(np.asanyarray(img2.dataobj), data) # byte swapped endian image bs_hdr = img.header.as_byteswapped() bs_img = Nifti1Image(data, affine, bs_hdr) assert_equal(bs_img.header.endianness, swapped_code) # of course the data is the same because it's not written to disk - assert_array_equal(bs_img.get_data(), data) + assert_array_equal(bs_img.get_fdata(), data) + assert_array_equal(np.asanyarray(bs_img.dataobj), data) # Check converting to another image cbs_img = AnalyzeImage.from_image(bs_img) # this will make the header native by doing the header conversion @@ -92,17 +90,21 @@ def test_save_load_endian(): assert_equal(cbs_hdr2.endianness, native_code) # Try byteswapped round trip bs_img2 = round_trip(bs_img) - bs_data2 = bs_img2.get_data() + bs_data2 = np.asanyarray(bs_img2.dataobj) + bs_fdata2 = bs_img2.get_fdata() # now the data dtype was swapped endian, so the read data is too assert_equal(bs_data2.dtype.byteorder, swapped_code) assert_equal(bs_img2.header.endianness, swapped_code) assert_array_equal(bs_data2, data) + # but get_fdata uses native endian + assert_not_equal(bs_fdata2.dtype.byteorder, swapped_code) + assert_array_equal(bs_fdata2, data) # Now mix up byteswapped data and non-byteswapped header mixed_img = Nifti1Image(bs_data2, affine) assert_equal(mixed_img.header.endianness, native_code) m_img2 = round_trip(mixed_img) assert_equal(m_img2.header.endianness, native_code) - assert_array_equal(m_img2.get_data(), data) + assert_array_equal(m_img2.get_fdata(), data) def test_save_load(): @@ -119,7 +121,7 @@ def test_save_load(): ni1.save(img, nifn) re_img = nils.load(nifn) assert_true(isinstance(re_img, ni1.Nifti1Image)) - assert_array_equal(re_img.get_data(), data) + assert_array_equal(re_img.get_fdata(), data) assert_array_equal(re_img.affine, affine) # These and subsequent del statements are to prevent confusing # windows errors when trying to open files or delete the @@ -129,20 +131,20 @@ def test_save_load(): spm2.save(img, sifn) re_img2 = nils.load(sifn) assert_true(isinstance(re_img2, spm2.Spm2AnalyzeImage)) - assert_array_equal(re_img2.get_data(), data) + assert_array_equal(re_img2.get_fdata(), data) assert_array_equal(re_img2.affine, affine) del re_img2 
spm99.save(img, sifn) re_img3 = nils.load(sifn) assert_true(isinstance(re_img3, spm99.Spm99AnalyzeImage)) - assert_array_equal(re_img3.get_data(), data) + assert_array_equal(re_img3.get_fdata(), data) assert_array_equal(re_img3.affine, affine) ni1.save(re_img3, nifn) del re_img3 re_img = nils.load(nifn) assert_true(isinstance(re_img, ni1.Nifti1Image)) - assert_array_equal(re_img.get_data(), data) + assert_array_equal(re_img.get_fdata(), data) assert_array_equal(re_img.affine, affine) del re_img @@ -173,7 +175,7 @@ def test_two_to_one(): # the offset stays at zero (but is 352 on disk) assert_equal(pimg.header['magic'], b'ni1') assert_equal(pimg.header['vox_offset'], 0) - assert_array_equal(pimg.get_data(), data) + assert_array_equal(pimg.get_fdata(), data) # same for from_image, going from single image to pair format ana_img = ana.AnalyzeImage.from_image(img) assert_equal(ana_img.header['vox_offset'], 0) @@ -211,7 +213,7 @@ def test_negative_load_save(): img.to_file_map() str_io.seek(0) re_img = Nifti1Image.from_file_map(img.file_map) - assert_array_almost_equal(re_img.get_data(), data, 4) + assert_array_almost_equal(re_img.get_fdata(), data, 4) def test_filename_save(): @@ -255,7 +257,7 @@ def test_filename_save(): fname = pjoin(pth, 'image' + out_ext) nils.save(img, fname) rt_img = nils.load(fname) - assert_array_almost_equal(rt_img.get_data(), data) + assert_array_almost_equal(rt_img.get_fdata(), data) assert_true(type(rt_img) is loadklass) # delete image to allow file close. Otherwise windows # raises an error when trying to delete the directory diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 4c1c703389..491cb07b76 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -35,7 +35,7 @@ def test_read_img_data(): ): fpath = pjoin(data_path, fname) img = load(fpath) - data = img.get_data() + data = img.get_fdata() data2 = read_img_data(img) assert_array_equal(data, data2) # These examples have null scaling - assert prefer=unscaled is the same @@ -87,7 +87,7 @@ def test_read_img_data_nifti(): # Load - now the scaling and offset correctly applied img_fname = img.file_map['image'].filename img_back = load(img_fname) - data_back = img_back.get_data() + data_back = img_back.get_fdata() assert_array_equal(data_back, read_img_data(img_back)) # This is the same as if we loaded the image and header separately hdr_fname = (img.file_map['header'].filename @@ -131,7 +131,7 @@ def test_read_img_data_nifti(): with open(img_fname, 'ab') as fobj: fobj.write(b'\x00\x00') img_back = load(img_fname) - data_back = img_back.get_data() + data_back = img_back.get_fdata() assert_array_equal(data_back, read_img_data(img_back)) img_back.header.set_data_offset(1026) # Check we pick up new offset diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index 50f4955917..a4d42fdc36 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -135,7 +135,7 @@ def test_old_namespace(): mimg = MincImage(arr, aff) # Call to create object created warning assert_equal(warns.pop(0).category, FutureWarning) - assert_array_equal(mimg.get_data(), arr) + assert_array_equal(mimg.get_fdata(), arr) # Another old name from ..minc1 import MincFile, Minc1File assert_false(MincFile is Minc1File) @@ -185,20 +185,20 @@ def test_load(self): # Check highest level load of minc works for tp in self.test_files: img = load(tp['fname']) - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, tp['shape']) # min, max, mean values from 
read in SPM2 / minctools assert_data_similar(data, tp) # check if mnc can be converted to nifti ni_img = Nifti1Image.from_image(img) assert_array_equal(ni_img.affine, tp['affine']) - assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) def test_array_proxy_slicing(self): # Test slicing of array proxy for tp in self.test_files: img = load(tp['fname']) - arr = img.get_data() + arr = img.get_fdata() prox = img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(img.shape): @@ -220,7 +220,7 @@ def test_compressed(self): fobj.write(content) fobj.close() img = self.module.load(fname) - data = img.get_data() + data = img.get_fdata() assert_data_similar(data, tp) del img diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index 57146171e9..ebfafa938f 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -64,7 +64,7 @@ def test_load(self): assert_almost_equal(img.affine, self.example_params['affine'], 4) assert_equal(img.get_data_dtype().type, self.example_params['type']) # Check correspondence of data and recorded shape - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, self.example_params['shape']) # min, max, mean values from read in SPM2 assert_almost_equal(data.min(), self.example_params['min'], 4) @@ -74,7 +74,7 @@ def test_load(self): ni_img = Nifti1Image.from_image(img) assert_almost_equal(ni_img.get_affine(), self.example_params['affine'], 2) - assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) class TestB0(TestEPIFrame): diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 38863e9aa2..0213b615f1 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -361,7 +361,7 @@ def test_freesurfer_ico7_hack(self): nii = load(os.path.join(nitest_path, 'derivative', 'fsaverage', 'surf', 'lh.orig.avg.area.nii')) assert_equal(mgh.shape, nii.shape) - assert_array_equal(mgh.get_data(), nii.get_data()) + assert_array_equal(mgh.get_fdata(), nii.get_fdata()) assert_array_equal(nii.header._structarr['dim'][1:4], np.array([27307, 1, 6])) # Test writing produces consistent nii files @@ -369,8 +369,8 @@ def test_freesurfer_ico7_hack(self): nii.to_filename('test.nii') nii2 = load('test.nii') assert_equal(nii.shape, nii2.shape) - assert_array_equal(nii.get_data(), nii2.get_data()) - assert_array_equal(nii.get_affine(), nii2.get_affine()) + assert_array_equal(nii.get_fdata(), nii2.get_fdata()) + assert_array_equal(nii.affine, nii2.affine) def test_qform_sform(self): HC = self.header_class @@ -975,16 +975,16 @@ def test_load_save(self): assert_equal(img.shape, shape) img.set_data_dtype(npt) img2 = bytesio_round_trip(img) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) with InTemporaryDirectory() as tmpdir: for ext in ('', '.gz', '.bz2'): fname = os.path.join(tmpdir, 'test' + img_ext + ext) img.to_filename(fname) img3 = IC.load(fname) assert_true(isinstance(img3, img.__class__)) - assert_array_equal(img3.get_data(), data) + assert_array_equal(img3.get_fdata(), data) assert_equal(img3.header, img.header) - assert_true(isinstance(img3.get_data(), + assert_true(isinstance(np.asanyarray(img3.dataobj), np.memmap if ext == '' else np.ndarray)) # del to avoid windows errors of form 'The process cannot # access the file because it is being used' @@ -1010,7 +1010,7 @@ def test_load_pixdims(self): assert_array_equal(img_hdr.get_zooms(), [2, 3, 4]) # Save to 
stringio re_simg = bytesio_round_trip(simg) - assert_array_equal(re_simg.get_data(), arr) + assert_array_equal(re_simg.get_fdata(), arr) # Check qform, sform, pixdims are the same rimg_hdr = re_simg.header assert_array_equal(rimg_hdr.get_qform(), qaff) @@ -1337,7 +1337,7 @@ def test_loadsave_cycle(self): lnim = bytesio_round_trip(wnim) assert_equal(lnim.get_data_dtype(), np.int16) # Scaling applied - assert_array_equal(lnim.get_data(), data * 2. + 8.) + assert_array_equal(lnim.get_fdata(), data * 2. + 8.) # slope, inter reset by image creation, but saved in proxy assert_equal(lnim.header.get_slope_inter(), (None, None)) assert_equal((lnim.dataobj.slope, lnim.dataobj.inter), (2, 8)) @@ -1354,11 +1354,11 @@ def test_load(self): with InTemporaryDirectory(): for img in (simg, pimg): save(img, 'test.nii') - assert_array_equal(arr, load('test.nii').get_data()) + assert_array_equal(arr, load('test.nii').get_fdata()) save(simg, 'test.img') - assert_array_equal(arr, load('test.img').get_data()) + assert_array_equal(arr, load('test.img').get_fdata()) save(simg, 'test.hdr') - assert_array_equal(arr, load('test.hdr').get_data()) + assert_array_equal(arr, load('test.hdr').get_fdata()) def test_float_int_min_max(self): # Conversion between float and int @@ -1370,7 +1370,7 @@ def test_float_int_min_max(self): for out_dt in IUINT_TYPES: img = self.single_class(arr, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() assert_true(np.allclose(arr, arr_back_sc)) def test_float_int_spread(self): @@ -1384,7 +1384,7 @@ def test_float_int_spread(self): for out_dt in IUINT_TYPES: img = self.single_class(arr_t, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() slope, inter = img_back.header.get_slope_inter() # Get estimate for error max_miss = rt_err_estimate(arr_t, arr_back_sc.dtype, slope, @@ -1407,7 +1407,7 @@ def test_rt_bias(self): for out_dt in IUINT_TYPES: img = self.single_class(arr_t, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() slope, inter = img_back.header.get_slope_inter() bias = np.mean(arr_t - arr_back_sc) # Get estimate for error @@ -1457,7 +1457,7 @@ def test_large_nifti1(): with InTemporaryDirectory(): img.to_filename('test.nii.gz') del img - data = load('test.nii.gz').get_data() + data = load('test.nii.gz').get_fdata() # Check that the data are all ones assert_equal(image_shape, data.shape) n_ones = np.sum((data == 1.)) diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 917bc417c6..940d8864e5 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -764,13 +764,13 @@ def test_varying_scaling(): scaled_arr[:, :, i] *= slopes[i] scaled_arr[:, :, i] += inters[i] assert_almost_equal(np.reshape(scaled_arr, img.shape, order='F'), - img.get_data(), 9) + img.get_fdata(), 9) # Check fp scaling for i in range(arr.shape[2]): scaled_arr[:, :, i] /= (slopes[i] * sc_slopes[i]) dv_img = PARRECImage.load(VARY_REC, scaling='fp') assert_almost_equal(np.reshape(scaled_arr, img.shape, order='F'), - dv_img.get_data(), 9) + dv_img.get_fdata(), 9) def test_anonymized(): diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index a09bd4cd85..0e1dbb83c7 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -411,7 +411,7 @@ def test_against_spm_resample(): func = nib.load(pjoin(DATA_DIR, 'functional.nii')) some_rotations = 
euler2mat(0.1, 0.2, 0.3) extra_affine = from_matvec(some_rotations, [3, 4, 5]) - moved_anat = nib.Nifti1Image(anat.get_data().astype(float), + moved_anat = nib.Nifti1Image(anat.get_fdata(), extra_affine.dot(anat.affine), anat.header) one_func = nib.Nifti1Image(func.dataobj[..., 0], diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 58ad5fa5d2..494ab4b556 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -377,7 +377,7 @@ class TestEcatAPI(_TestProxyAPI): def obj_params(self): eg_path = pjoin(DATA_PATH, self.eg_fname) img = ecat.load(eg_path) - arr_out = img.get_data() + arr_out = img.get_fdata() def eg_func(): img = ecat.load(eg_path) @@ -398,7 +398,7 @@ class TestPARRECAPI(_TestProxyAPI): def _func_dict(self, rec_name): img = parrec.load(rec_name) - arr_out = img.get_data() + arr_out = img.get_fdata() def eg_func(): img = parrec.load(rec_name) diff --git a/nibabel/tests/test_round_trip.py b/nibabel/tests/test_round_trip.py index d216a03cdd..5c3a12b086 100644 --- a/nibabel/tests/test_round_trip.py +++ b/nibabel/tests/test_round_trip.py @@ -25,7 +25,7 @@ def round_trip(arr, out_dtype): img.to_file_map() back = Nifti1Image.from_file_map(img.file_map) # Recover array and calculated scaling from array proxy object - return back.get_data(), back.dataobj.slope, back.dataobj.inter + return back.get_fdata(), back.dataobj.slope, back.dataobj.inter def check_params(in_arr, in_type, out_type): diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 99e9c546f0..6d46e57c5c 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -216,7 +216,7 @@ def check_conversion(cmd, pr_data, out_fname): img = load(out_fname) # Check orientations always LAS assert_equal(aff2axcodes(img.affine), tuple('LAS')) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], data.min())) assert_true(np.allclose(img.header['cal_max'], data.max())) @@ -224,21 +224,21 @@ def check_conversion(cmd, pr_data, out_fname): # Check minmax options run_command(cmd + ['--minmax', '1', '2']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], 1)) assert_true(np.allclose(img.header['cal_max'], 2)) del img, data # for windows run_command(cmd + ['--minmax', 'parse', '2']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], data.min())) assert_true(np.allclose(img.header['cal_max'], 2)) del img, data # for windows run_command(cmd + ['--minmax', '1', 'parse']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], 1)) assert_true(np.allclose(img.header['cal_max'], data.max())) @@ -260,7 +260,7 @@ def test_parrec2nii(): assert_equal(img.shape, eg_dict['shape']) assert_dt_equal(img.get_data_dtype(), eg_dict['dtype']) # Check against values from Philips converted nifti image - data = img.get_data() + data = img.get_fdata() assert_data_similar(data, eg_dict) assert_almost_equal(img.header.get_zooms(), eg_dict['zooms']) # Standard save does not save extensions @@ -273,7 +273,7 @@ def test_parrec2nii(): assert_equal(code, 1) # Default scaling is dv pr_img = load(fname) - flipped_data = flip_axis(pr_img.get_data(), 1) + flipped_data = 
flip_axis(pr_img.get_fdata(), 1) base_cmd = ['parrec2nii', '--overwrite', fname] check_conversion(base_cmd, flipped_data, out_froot) check_conversion(base_cmd + ['--scaling=dv'], @@ -281,7 +281,7 @@ def test_parrec2nii(): out_froot) # fp pr_img = load(fname, scaling='fp') - flipped_data = flip_axis(pr_img.get_data(), 1) + flipped_data = flip_axis(pr_img.get_fdata(), 1) check_conversion(base_cmd + ['--scaling=fp'], flipped_data, out_froot) @@ -356,7 +356,7 @@ def test_parrec2nii_with_data(): bvals_trace = np.loadtxt('DTI.bvals') assert_almost_equal(bvals_trace, DTI_PAR_BVALS) img = load('DTI.nii') - data = img.get_data().copy() + data = img.get_fdata() del img # Bvecs in header, transposed from PSL to LPS bvecs_LPS = DTI_PAR_BVECS[:, [2, 0, 1]] @@ -384,7 +384,7 @@ def test_parrec2nii_with_data(): img = load('DTI.nii') bvecs_notrace = np.loadtxt('DTI.bvecs').T bvals_notrace = np.loadtxt('DTI.bvals') - data_notrace = img.get_data().copy() + data_notrace = img.get_fdata() assert_equal(data_notrace.shape[-1], len(bvecs_notrace)) del img # ensure correct volume was removed @@ -399,7 +399,7 @@ def test_parrec2nii_with_data(): # strict-sort: bvals should be in ascending order assert_almost_equal(np.loadtxt('DTI.bvals'), np.sort(DTI_PAR_BVALS)) img = load('DTI.nii') - data_sorted = img.get_data().copy() + data_sorted = img.get_fdata() assert_almost_equal(data[..., np.argsort(DTI_PAR_BVALS)], data_sorted) del img diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index b0f571023d..54633c9820 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -22,7 +22,7 @@ from unittest import TestCase from nose.tools import (assert_true, assert_false, assert_equal, assert_not_equal, assert_raises) -from numpy.testing import assert_array_equal, assert_array_almost_equal +from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns from .test_helpers import bytesio_round_trip from ..testing import (clear_and_catch_warnings, suppress_warnings, @@ -235,7 +235,7 @@ def test_images(self): # See https://github.com/nipy/nibabel/issues/58 arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) img = self.image_class(arr, None) - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) assert_equal(img.affine, None) def test_default_header(self): @@ -252,9 +252,9 @@ def test_data_api(self): img = self.image_class(DataLike(), None) # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_array_equal(img.get_data().flatten(), np.arange(3)) - assert_equal(img.get_shape()[:1], (3,)) - assert_equal(np.prod(img.get_shape()), 3) + assert_array_equal(img.get_fdata().flatten(), np.arange(3)) + assert_equal(img.shape[:1], (3,)) + assert_equal(np.prod(img.shape), 3) def check_dtypes(self, expected, actual): # Some images will want dtypes to be equal including endianness, @@ -389,9 +389,10 @@ def test_get_data(self): "Cannot slice image objects; consider using " "`img.slicer[slice]` to generate a sliced image (see " "documentation for caveats) or slicing image array data " - "with `img.dataobj[slice]` or `img.get_data()[slice]`") + "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") assert_true(in_data is img.dataobj) - out_data = img.get_data() + with assert_warns(DeprecationWarning): + out_data = img.get_data() assert_true(in_data is out_data) # and that uncache has no effect img.uncache() @@ -403,15 +404,19 @@ def test_get_data(self): rt_img = bytesio_round_trip(img) 
assert_false(in_data is rt_img.dataobj) assert_array_equal(rt_img.dataobj, in_data) - out_data = rt_img.get_data() + with assert_warns(DeprecationWarning): + out_data = rt_img.get_data() assert_array_equal(out_data, in_data) assert_false(rt_img.dataobj is out_data) # cache - assert_true(rt_img.get_data() is out_data) + with assert_warns(DeprecationWarning): + assert_true(rt_img.get_data() is out_data) out_data[:] = 42 rt_img.uncache() - assert_false(rt_img.get_data() is out_data) - assert_array_equal(rt_img.get_data(), in_data) + with assert_warns(DeprecationWarning): + assert_false(rt_img.get_data() is out_data) + with assert_warns(DeprecationWarning): + assert_array_equal(rt_img.get_data(), in_data) def test_slicer(self): img_klass = self.image_class @@ -534,10 +539,14 @@ def test_slicer(self): pass else: sliced_data = in_data[sliceobj] - assert_array_equal(sliced_data, sliced_img.get_data()) + with assert_warns(DeprecationWarning): + assert_array_equal(sliced_data, sliced_img.get_data()) + assert_array_equal(sliced_data, sliced_img.get_fdata()) assert_array_equal(sliced_data, sliced_img.dataobj) assert_array_equal(sliced_data, img.dataobj[sliceobj]) - assert_array_equal(sliced_data, img.get_data()[sliceobj]) + with assert_warns(DeprecationWarning): + assert_array_equal(sliced_data, img.get_data()[sliceobj]) + assert_array_equal(sliced_data, img.get_fdata()[sliceobj]) def test_api_deprecations(self): @@ -632,7 +641,7 @@ def test_load_mmap(self): if mmap is not None: kwargs['mmap'] = mmap back_img = func(param1, **kwargs) - back_data = back_img.get_data() + back_data = np.asanyarray(back_img.dataobj) if expected_mode is None: assert_false(isinstance(back_data, np.memmap), 'Should not be a %s' % img_klass.__name__) diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 137d3b0451..86143f35ab 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -187,7 +187,7 @@ def assert_null_scaling(self, arr, slope, inter): img = img_class(arr, np.eye(4), input_hdr) img_hdr = img.header self._set_raw_scaling(input_hdr, slope, inter) - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # Scaling has no effect on image as written via header (with rescaling # turned off). 
fm = bytesio_filemap(img) @@ -196,12 +196,12 @@ def assert_null_scaling(self, arr, slope, inter): img_hdr.write_to(hdr_fobj) img_hdr.data_to_fileobj(arr, img_fobj, rescale=False) raw_rt_img = img_class.from_file_map(fm) - assert_array_equal(raw_rt_img.get_data(), arr) + assert_array_equal(raw_rt_img.get_fdata(), arr) # Scaling makes no difference for image round trip fm = bytesio_filemap(img) img.to_file_map(fm) rt_img = img_class.from_file_map(fm) - assert_array_equal(rt_img.get_data(), arr) + assert_array_equal(rt_img.get_fdata(), arr) def test_header_scaling(self): # For images that implement scaling, test effect of scaling @@ -258,20 +258,20 @@ def _check_write_scaling(self, img = img_class(arr, aff) self.assert_scale_me_scaling(img.header) # Array from image unchanged by scaling - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # As does round trip img_rt = bytesio_round_trip(img) self.assert_scale_me_scaling(img_rt.header) # Round trip array is not scaled - assert_array_equal(img_rt.get_data(), arr) + assert_array_equal(img_rt.get_fdata(), arr) # Explicit scaling causes scaling after round trip self._set_raw_scaling(img.header, slope, inter) self.assert_scaling_equal(img.header, slope, inter) # Array from image unchanged by scaling - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # But the array scaled after round trip img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(arr, effective_slope, effective_inter)) @@ -289,7 +289,7 @@ def _check_write_scaling(self, img.header.set_data_dtype(np.uint8) with np.errstate(invalid='ignore'): img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(np.round(arr), effective_slope, effective_inter)) @@ -299,7 +299,7 @@ def _check_write_scaling(self, with np.errstate(invalid='ignore'): img_rt = bytesio_round_trip(img) exp_unscaled_arr = np.clip(np.round(arr), 0, 255) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(exp_unscaled_arr, effective_slope, effective_inter)) @@ -313,7 +313,7 @@ def test_int_int_scaling(self): img.set_data_dtype(np.uint8) self._set_raw_scaling(hdr, 1, 0 if hdr.has_data_intercept else None) img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), np.clip(arr, 0, 255)) + assert_array_equal(img_rt.get_fdata(), np.clip(arr, 0, 255)) def test_no_scaling(self): # Test writing image converting types when not calculating scaling @@ -337,7 +337,7 @@ def test_no_scaling(self): with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) with suppress_warnings(): # invalid mult - back_arr = rt_img.get_data() + back_arr = np.asanyarray(rt_img.dataobj) exp_back = arr.copy() # If converting to floating point type, casting is direct. 
# Otherwise we will need to do float-(u)int casting at some point @@ -392,13 +392,13 @@ def test_nan2zero_range_ok(self): arr[1, 0, 0] = 256 # to push outside uint8 range img = img_class(arr, np.eye(4)) rt_img = bytesio_round_trip(img) - assert_array_equal(rt_img.get_data(), arr) + assert_array_equal(rt_img.get_fdata(), arr) # Uncontroversial so far, but now check that nan2zero works correctly # for int type img.set_data_dtype(np.uint8) with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) - assert_equal(rt_img.get_data()[0, 0, 0], 0) + assert_equal(rt_img.get_fdata()[0, 0, 0], 0) class TestSpm99AnalyzeImage(test_analyze.TestAnalyzeImage, ImageScalingMixin): @@ -450,7 +450,7 @@ def test_mat_read(self): # Test round trip img.to_file_map() r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, aff) # mat files are for matlab and have 111 voxel origins. We need to # adjust for that, when loading and saving. Check for signs of that in @@ -478,7 +478,7 @@ def test_mat_read(self): dict(M=np.diag([3, 4, 5, 1]), mat=np.diag([6, 7, 8, 1]))) # Check we are preferring the 'mat' matrix r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, np.dot(np.diag([6, 7, 8, 1]), to_111)) # But will use M if present @@ -486,7 +486,7 @@ def test_mat_read(self): mat_fileobj.truncate(0) savemat(mat_fileobj, dict(M=np.diag([3, 4, 5, 1]))) r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, np.dot(np.diag([3, 4, 5, 1]), np.dot(flipper, to_111))) From 783a0d0f9cb35245da0f5a5cddcfd4769c69a3b2 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 16 Sep 2019 07:58:18 -0400 Subject: [PATCH 267/689] DOC: Update test_image_api docstring for clarity, consistency --- nibabel/tests/test_image_api.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index b27665523d..748f9c2472 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -14,10 +14,10 @@ * ``img.shape`` (shape of data as read with ``np.array(img.dataobj)`` * ``img.get_fdata()`` (returns floating point data as read with ``np.array(img.dataobj)`` and the cast to float); -* ``img.uncache()`` (``img.get_fdata()`` and ``img.get_data()`` (deprecated) are - allowed to cache the result of the array creation. If they do, this call empties - that cache. Implement this as a no-op if ``get_fdata()``, ``get_data`` do not - cache. +* ``img.uncache()`` (``img.get_fdata()`` (recommended) and ``img.get_data()`` + (deprecated) are allowed to cache the result of the array creation. If they + do, this call empties that cache. Implement this as a no-op if + ``get_fdata()``, ``get_data()`` do not cache.) * ``img[something]`` generates an informative TypeError * ``img.in_memory`` is True for an array image, and for a proxy image that is cached, but False otherwise. 
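Patches 268, 269 and 271 below teach nibabel's I/O entry points to accept path-like objects by coercing them to strings with a ``_stringify_path`` helper. The following is a minimal sketch of what that enables once those patches are applied; the image and filename are illustrative (they assume a writable working directory) and are not part of nibabel's test suite:

import pathlib

import numpy as np
import nibabel as nib

# Round-trip a small in-memory image through a path-like filename.
img = nib.Nifti1Image(np.zeros((2, 3, 4)), np.eye(4))
fname = pathlib.Path('tiny.nii')

nib.save(img, fname)        # filename coerced via _stringify_path
img_back = nib.load(fname)  # likewise coerced before format guessing
assert img_back.shape == (2, 3, 4)

Anything that is not path-like (plain strings, bytes, open file objects) passes through the helper unchanged, so existing callers are unaffected.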
From 33466894059fd45d5e23373ac7e3034e35995673 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Mon, 16 Sep 2019 10:42:00 -0700 Subject: [PATCH 268/689] Add fn to coerce pathlike to string --- nibabel/loadsave.py | 33 ++++++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index fabeb55c60..515ffdec34 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -11,6 +11,7 @@ import os import numpy as np +import pathlib from .filename_parser import splitext_addext from .openers import ImageOpener @@ -19,6 +20,33 @@ from .arrayproxy import is_proxy from .deprecated import deprecate_with_version +def _stringify_path(filepath_or_buffer): + """Attempt to convert a path-like object to a string. + + Parameters + ---------- + filepath_or_buffer : object to be converted + Returns + ------- + str_filepath_or_buffer : maybe a string version of the object + Notes + ----- + Objects supporting the fspath protocol (python 3.6+) are coerced + according to its __fspath__ method. + For backwards compatibility with older pythons, pathlib.Path and + py.path objects are specially coerced. + Any other object is passed through unchanged, which includes bytes, + strings, buffers, or anything else that's not even path-like. + + Copied from: + https://github.com/pandas-dev/pandas/blob/325dd686de1589c17731cf93b649ed5ccb5a99b4/pandas/io/common.py#L131-L160 + """ + if hasattr(filepath_or_buffer, "__fspath__"): + return filepath_or_buffer.__fspath__() + elif isinstance(filepath_or_buffer, pathlib.Path): + return str(filepath_or_buffer) + return filepath_or_buffer + def load(filename, **kwargs): ''' Load file given filename, guessing at file type @@ -35,11 +63,6 @@ def load(filename, **kwargs): img : ``SpatialImage`` Image of guessed type ''' - #Coerce pathlib objects - if hasattr(filename, '__fspath__'): - filename = filename.__fspath__() - else: - filename = str(filename) #Check file exists and is not empty try: From aaf1a198906037e8feeb4c44c24d7987ca4b8320 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Mon, 16 Sep 2019 10:54:27 -0700 Subject: [PATCH 269/689] Insert _stringify_path to entry points --- nibabel/filebasedimages.py | 4 ++++ nibabel/filename_parser.py | 5 +++++ nibabel/loadsave.py | 2 ++ 3 files changed, 11 insertions(+) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 64b79550e3..3d2877aa82 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -15,6 +15,7 @@ splitext_addext) from .openers import ImageOpener from .deprecated import deprecate_with_version +from .loadsave import _stringify_path class ImageFileError(Exception): @@ -252,10 +253,12 @@ def set_filename(self, filename): ``.file_map`` attribute. Otherwise, the image instance will try and guess the other filenames from this given filename. 
''' + filename = _stringify_path(filename) self.file_map = self.__class__.filespec_to_file_map(filename) @classmethod def from_filename(klass, filename): + filename = _stringify_path(filename) file_map = klass.filespec_to_file_map(filename) return klass.from_file_map(file_map) @@ -330,6 +333,7 @@ def to_filename(self, filename): ------- None ''' + filename = _stringify_path(filename) self.file_map = self.filespec_to_file_map(filename) self.to_file_map() diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index db6e073018..8c9ed07892 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -14,6 +14,7 @@ except NameError: basestring = str +from .loadsave import _stringify_path class TypesFilenamesError(Exception): pass @@ -190,6 +191,8 @@ def parse_filename(filename, >>> parse_filename('/path/fnameext2.gz', types_exts, ('.gz',)) ('/path/fname', 'ext2', '.gz', 't2') ''' + filename = _stringify_path(filename) + ignored = None if match_case: endswith = _endswith @@ -257,6 +260,8 @@ def splitext_addext(filename, >>> splitext_addext('fname.ext.foo', ('.foo', '.bar')) ('fname', '.ext', '.foo') ''' + filename = _stringify_path(filename) + if match_case: endswith = _endswith else: diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 515ffdec34..5e7d199545 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -63,6 +63,7 @@ def load(filename, **kwargs): img : ``SpatialImage`` Image of guessed type ''' + filename = _stringify_path(filename) #Check file exists and is not empty try: @@ -123,6 +124,7 @@ def save(img, filename): ------- None ''' + filename = _stringify_path(filename) # Save the type as expected try: From b7a3e96e4ebc5392cc43b0b3f70fde0187d02cc0 Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Tue, 17 Sep 2019 10:48:07 -0500 Subject: [PATCH 270/689] ignore the CSA string if it can't be read --- nibabel/nicom/csareader.py | 2 +- nibabel/nicom/dicomwrappers.py | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index de2b5dbb1a..9efc5fe12f 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -100,7 +100,7 @@ def read(csa_str): csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I') if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS: raise CSAReadError('Number of tags `t` should be ' - '0 < t <= %d' % MAX_CSA_ITEMS) + '0 < t <= %d. Instead found %d tags.' 
% (MAX_CSA_ITEMS, csa_dict['n_tags'])) for tag_no in range(csa_dict['n_tags']): name, vm, vr, syngodt, n_items, last3 = \ up_str.unpack('64si4s3i') diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index cd0c1daf67..d9076eaf68 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -79,7 +79,12 @@ def wrapper_from_data(dcm_data): return MultiframeWrapper(dcm_data) # Check for Siemens DICOM format types # Only Siemens will have data for the CSA header - csa = csar.get_csa_header(dcm_data) + try: + csa = csar.get_csa_header(dcm_data) + except csar.CSAReadError as e: + warnings.warn('Error while attempting to read CSA header: '+ + str(e.args) + '\n ignoring Siemens private (CSA) header info.') + csa = None if csa is None: return Wrapper(dcm_data) if csar.is_mosaic(csa): From 185f82c69640195cb64eba9e73e93e07d8eedda8 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Tue, 17 Sep 2019 11:21:18 -0700 Subject: [PATCH 271/689] Refactor _stringify_path to filename_parser.py --- nibabel/filebasedimages.py | 3 +-- nibabel/filename_parser.py | 29 ++++++++++++++++++++++++++++- nibabel/loadsave.py | 28 +--------------------------- 3 files changed, 30 insertions(+), 30 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 3d2877aa82..4f84e0c1b4 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -12,10 +12,9 @@ from copy import deepcopy from .fileholders import FileHolder from .filename_parser import (types_filenames, TypesFilenamesError, - splitext_addext) + splitext_addext, _stringify_path) from .openers import ImageOpener from .deprecated import deprecate_with_version -from .loadsave import _stringify_path class ImageFileError(Exception): diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index 8c9ed07892..8f69c4a843 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -14,11 +14,38 @@ except NameError: basestring = str -from .loadsave import _stringify_path +import pathlib class TypesFilenamesError(Exception): pass +def _stringify_path(filepath_or_buffer): + """Attempt to convert a path-like object to a string. + + Parameters + ---------- + filepath_or_buffer : object to be converted + Returns + ------- + str_filepath_or_buffer : maybe a string version of the object + Notes + ----- + Objects supporting the fspath protocol (python 3.6+) are coerced + according to its __fspath__ method. + For backwards compatibility with older pythons, pathlib.Path and + py.path objects are specially coerced. + Any other object is passed through unchanged, which includes bytes, + strings, buffers, or anything else that's not even path-like. 
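A sketch of the user-visible effect of the CSA fallback added in dicomwrappers above, assuming `dcm_data` is a pydicom dataset whose CSA header is unreadable (the dataset itself is not supplied here):

    import warnings
    from nibabel.nicom import dicomwrappers

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        wrapper = dicomwrappers.wrapper_from_data(dcm_data)
    # Instead of propagating CSAReadError, we now get a plain (non-Siemens)
    # Wrapper plus a single warning describing the failure
    assert isinstance(wrapper, dicomwrappers.Wrapper)
    assert len(caught) == 1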
+ + Copied from: + https://github.com/pandas-dev/pandas/blob/325dd686de1589c17731cf93b649ed5ccb5a99b4/pandas/io/common.py#L131-L160 + """ + if hasattr(filepath_or_buffer, "__fspath__"): + return filepath_or_buffer.__fspath__() + elif isinstance(filepath_or_buffer, pathlib.Path): + return str(filepath_or_buffer) + return filepath_or_buffer + def types_filenames(template_fname, types_exts, trailing_suffixes=('.gz', '.bz2'), diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 5e7d199545..015973914c 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -11,41 +11,15 @@ import os import numpy as np -import pathlib -from .filename_parser import splitext_addext +from .filename_parser import splitext_addext, _stringify_path from .openers import ImageOpener from .filebasedimages import ImageFileError from .imageclasses import all_image_classes from .arrayproxy import is_proxy from .deprecated import deprecate_with_version -def _stringify_path(filepath_or_buffer): - """Attempt to convert a path-like object to a string. - Parameters - ---------- - filepath_or_buffer : object to be converted - Returns - ------- - str_filepath_or_buffer : maybe a string version of the object - Notes - ----- - Objects supporting the fspath protocol (python 3.6+) are coerced - according to its __fspath__ method. - For backwards compatibility with older pythons, pathlib.Path and - py.path objects are specially coerced. - Any other object is passed through unchanged, which includes bytes, - strings, buffers, or anything else that's not even path-like. - - Copied from: - https://github.com/pandas-dev/pandas/blob/325dd686de1589c17731cf93b649ed5ccb5a99b4/pandas/io/common.py#L131-L160 - """ - if hasattr(filepath_or_buffer, "__fspath__"): - return filepath_or_buffer.__fspath__() - elif isinstance(filepath_or_buffer, pathlib.Path): - return str(filepath_or_buffer) - return filepath_or_buffer def load(filename, **kwargs): From 664d05faea3042e9ac7babd502095c5c38c58d4b Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Wed, 18 Sep 2019 11:02:25 -0500 Subject: [PATCH 272/689] fix a typo --- nibabel/nicom/dicomwrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index d9076eaf68..2bd3345366 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -83,7 +83,7 @@ def wrapper_from_data(dcm_data): csa = csar.get_csa_header(dcm_data) except csar.CSAReadError as e: warnings.warn('Error while attempting to read CSA header: '+ - str(e.args) + '\n ignoring Siemens private (CSA) header info.') + str(e.args) + '\n Ignoring Siemens private (CSA) header info.') csa = None if csa is None: return Wrapper(dcm_data) From 08b5f0f8c4970e70b16354a550336d424900c2ca Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 19 Sep 2019 16:29:29 -0400 Subject: [PATCH 273/689] DOC: Update changelog for upcoming 2.5.1 release --- Changelog | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/Changelog b/Changelog index 832e7d0843..fb004c93e3 100644 --- a/Changelog +++ b/Changelog @@ -25,6 +25,33 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. 
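The motivation for this move is visible in the diffs themselves: patch 269 had `filebasedimages.py` and `filename_parser.py` importing `_stringify_path` from `loadsave`, while `loadsave` imports `ImageFileError` from `filebasedimages` and `splitext_addext` from `filename_parser`, creating circular imports. Relocating the helper into `filename_parser`, which needs neither module, breaks both cycles. The end-to-end result (a sketch; the file name is hypothetical):

    import pathlib
    import nibabel as nib

    fname = pathlib.Path('/tmp/example.nii.gz')
    img = nib.load(fname)                          # Path objects now accepted on load...
    nib.save(img, fname.with_name('copy.nii.gz'))  # ...and on save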
+2.5.1 (Monday 23 September 2019) +================================ + +Enhancements +------------ +* Ignore endianness in ``nib-diff`` if values match (pr/799) (YOH, reviewed + by CM) + +Bug fixes +--------- +* Correctly handle Philips DICOMs w/ derived volume (pr/795) (Mathias + Goncalves, reviewed by CM) +* Raise CSA tag limit to 1000, parametrize for future relaxing (pr/798, + backported to 2.5.x in pr/800) (Henry Braun, reviewed by CM, MB) +* Coerce data types to match NIfTI intent codes when writing GIFTI data + arrays (pr/806) (CM, reported by Tom Holroyd) + +Maintenance +----------- +* Require h5py 2.10 for Windows + Python < 3.6 to resolve unexpected dtypes + in Minc2 data (pr/804) (CM, reviewed by YOH) + +API changes and deprecations +---------------------------- +* Deprecate ``nicom.dicomwrappers.Wrapper.get_affine()`` in favor of ``affine`` + property; final removal in nibabel 4.0 (pr/796) (YOH, reviewed by CM) + 2.5.0 (Sunday 4 August 2019) ============================ From d8941fc3b1f6057f67aecd6355844f7bda182e94 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 15 Sep 2019 10:26:37 -0400 Subject: [PATCH 274/689] MAINT: Version 2.5.1 --- nibabel/info.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index 12d0dc9d7e..c45e36c8e6 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -19,8 +19,8 @@ _version_major = 2 _version_minor = 5 _version_micro = 1 -_version_extra = 'dev' -# _version_extra = '' +# _version_extra = 'dev' +_version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From bcb4dcc618db5dc0f632e04d71034dc4a281ddc9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 15 Sep 2019 10:28:05 -0400 Subject: [PATCH 275/689] MAINT: Update zenodo.json --- .zenodo.json | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 16555380a4..a5ce5075aa 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -87,6 +87,11 @@ { "name": "Moloney, Brendan" }, + { + "affiliation": "MIT", + "name": "Goncalves, Mathias", + "orcid": "0000-0002-7252-7771" + }, { "name": "Burns, Christopher" }, @@ -130,11 +135,6 @@ { "name": "Baker, Eric M." }, - { - "affiliation": "MIT", - "name": "Goncalves, Mathias", - "orcid": "0000-0002-7252-7771" - }, { "name": "Hayashi, Soichi" }, @@ -244,6 +244,9 @@ "name": "P\u00e9rez-Garc\u00eda, Fernando", "orcid": "0000-0001-9090-3024" }, + { + "name": "Braun, Henry" + }, { "name": "Solovey, Igor" }, From 28d460ea61560d0f20b3014e1fe03e5e688d80f6 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 15 Sep 2019 10:36:20 -0400 Subject: [PATCH 276/689] DOC: Add Henry Braun to contributor list --- doc/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/index.rst b/doc/source/index.rst index ce1db32b86..09f09d4883 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -98,6 +98,7 @@ contributed code and discussion (in rough order of appearance): * Matt Cieslak * Egor Pafilov * Jath Palasubramaniam +* Henry Braun License reprise =============== From 1ab44e7298282fcd718007321439725034100131 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 15 Sep 2019 10:55:59 -0400 Subject: [PATCH 277/689] MAINT: Update setup.cfg nose version to match installation instructions --- setup.cfg | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 69bd84afe7..08e8727424 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,7 +34,8 @@ install_requires = numpy >=1.8 six >=1.3 bz2file ; python_version < "3.0" -tests_require = nose +tests_require = + nose >=0.11 test_suite = nose.collector zip_safe = False packages = find: @@ -46,7 +47,7 @@ dicom = doc = sphinx >=0.3 test = - nose >=0.10.1 + nose >=0.11 all = %(dicom)s %(doc)s From 248b946b5d956b4b80f84006b26630620084d1cc Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Thu, 19 Sep 2019 16:23:03 -0500 Subject: [PATCH 278/689] add @hbraunDSP affiliation and ORCID --- .zenodo.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.zenodo.json b/.zenodo.json index a5ce5075aa..9b4621464f 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -245,7 +245,9 @@ "orcid": "0000-0001-9090-3024" }, { - "name": "Braun, Henry" + "affiliation": "Center for Magnetic Resonance Research, University of Minnesota", + "name": "Braun, Henry", + "orcid": "0000-0001-7003-9822" }, { "name": "Solovey, Igor" From 3cfff0d14f5d36950a2661bb262c52ffabc66bac Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 19 Sep 2019 21:31:47 -0700 Subject: [PATCH 279/689] ENH: Add an utility to calculate obliquity of affines This implementation mimics the implementation of AFNI. --- nibabel/affines.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/nibabel/affines.py b/nibabel/affines.py index a7d7a4e9b8..03ab8b80e8 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -3,6 +3,7 @@ """ Utility routines for working with points and affine transforms """ +from math import acos, pi as PI import numpy as np from functools import reduce @@ -296,3 +297,24 @@ def voxel_sizes(affine): """ top_left = affine[:-1, :-1] return np.sqrt(np.sum(top_left ** 2, axis=0)) + + +def obliquity(affine): + r""" + Estimate the obliquity an affine's axes represent, in degrees from plumb. + + This implementation is inspired by `AFNI's implementation + `_ + + >>> affine = np.array([ + ... [2.74999725e+00, -2.74999817e-03, 2.74999954e-03, -7.69847980e+01], + ... [2.98603540e-03, 2.73886840e+00, -2.47165887e-01, -8.36692043e+01], + ... [-2.49170222e-03, 2.47168626e-01, 2.73886865e+00, -8.34056848e+01], + ... [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]]) + >>> abs(5.15699 - obliquity(affine)) < 0.0001 + True + + """ + vs = voxel_sizes(affine) + fig_merit = (affine[:-1, :-1] / vs[np.newaxis, ...]).max(axis=1).min() + return abs(acos(fig_merit) * 180 / PI) From c92d5609f37ac2ec18f94606b8bb3075759e27ea Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Sep 2019 11:26:42 -0700 Subject: [PATCH 280/689] enh(tests): add not-oblique test, move tests to ``test_affines.py`` Addressing one of @matthew-brett's comments. --- nibabel/affines.py | 8 -------- nibabel/tests/test_affines.py | 12 +++++++++++- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 03ab8b80e8..bc800fd318 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -306,14 +306,6 @@ def obliquity(affine): This implementation is inspired by `AFNI's implementation `_ - >>> affine = np.array([ - ... [2.74999725e+00, -2.74999817e-03, 2.74999954e-03, -7.69847980e+01], - ... 
[2.98603540e-03, 2.73886840e+00, -2.47165887e-01, -8.36692043e+01], - ... [-2.49170222e-03, 2.47168626e-01, 2.73886865e+00, -8.34056848e+01], - ... [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]]) - >>> abs(5.15699 - obliquity(affine)) < 0.0001 - True - """ vs = voxel_sizes(affine) fig_merit = (affine[:-1, :-1] / vs[np.newaxis, ...]).max(axis=1).min() diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index e66ed46190..143c80312b 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -7,7 +7,7 @@ from ..eulerangles import euler2mat from ..affines import (AffineError, apply_affine, append_diag, to_matvec, - from_matvec, dot_reduce, voxel_sizes) + from_matvec, dot_reduce, voxel_sizes, obliquity) from nose.tools import assert_equal, assert_raises @@ -178,3 +178,13 @@ def test_voxel_sizes(): rot_affine[:3, :3] = rotation full_aff = rot_affine.dot(aff) assert_almost_equal(voxel_sizes(full_aff), vox_sizes) + + +def test_obliquity(): + """Check the calculation of inclination of an affine axes.""" + aligned = np.diag([2.0, 2.0, 2.3, 1.0]) + aligned[:-1, -1] = [-10, -10, -7] + R = from_matvec(euler2mat(x=0.09, y=0.001, z=0.001), [0.0, 0.0, 0.0]) + oblique = R.dot(aligned) + assert_almost_equal(obliquity(aligned), 0.0) + assert_almost_equal(obliquity(oblique), 5.1569948883) From da8da564adfccee6e87ec3695b9ce0aa55fa1be9 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 21 Sep 2019 17:46:12 -0700 Subject: [PATCH 281/689] enh: return radians unless degrees=True --- nibabel/affines.py | 7 +++++-- nibabel/tests/test_affines.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index bc800fd318..462798b6f1 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -299,7 +299,7 @@ def voxel_sizes(affine): return np.sqrt(np.sum(top_left ** 2, axis=0)) -def obliquity(affine): +def obliquity(affine, degrees=False): r""" Estimate the obliquity an affine's axes represent, in degrees from plumb. @@ -309,4 +309,7 @@ def obliquity(affine): """ vs = voxel_sizes(affine) fig_merit = (affine[:-1, :-1] / vs[np.newaxis, ...]).max(axis=1).min() - return abs(acos(fig_merit) * 180 / PI) + radians = abs(acos(fig_merit)) + if not degrees: + return radians + return radians * 180 / PI diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index 143c80312b..db4fbe0630 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -187,4 +187,4 @@ def test_obliquity(): R = from_matvec(euler2mat(x=0.09, y=0.001, z=0.001), [0.0, 0.0, 0.0]) oblique = R.dot(aligned) assert_almost_equal(obliquity(aligned), 0.0) - assert_almost_equal(obliquity(oblique), 5.1569948883) + assert_almost_equal(obliquity(oblique, degrees=True), 5.1569948883) From 38342e3acf3c9134bc6eae707af77d931c66bd52 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 23 Sep 2019 12:02:49 -0400 Subject: [PATCH 282/689] MAINT: 2.5.2-dev --- nibabel/info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index c45e36c8e6..686baeae01 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -18,9 +18,9 @@ # (pre-release) version. 
_version_major = 2 _version_minor = 5 -_version_micro = 1 -# _version_extra = 'dev' -_version_extra = '' +_version_micro = 2 +_version_extra = 'dev' +# _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, From 253a256d18d75664001ef3d9872c25952675b7f4 Mon Sep 17 00:00:00 2001 From: oesteban Date: Mon, 23 Sep 2019 09:31:42 -0700 Subject: [PATCH 283/689] enh: address @matthew-brett's comments --- nibabel/affines.py | 26 +++++++++++++++++--------- nibabel/tests/test_affines.py | 6 ++++-- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 462798b6f1..25c9a4cdbd 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -2,8 +2,6 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """ Utility routines for working with points and affine transforms """ - -from math import acos, pi as PI import numpy as np from functools import reduce @@ -299,17 +297,27 @@ def voxel_sizes(affine): return np.sqrt(np.sum(top_left ** 2, axis=0)) -def obliquity(affine, degrees=False): +def obliquity(affine): r""" - Estimate the obliquity an affine's axes represent, in degrees from plumb. + Estimate the *obliquity* an affine's axes represent. + The term *obliquity* is defined here as the rotation of those axes with + respect to the cardinal axes. This implementation is inspired by `AFNI's implementation `_ + Parameters + ---------- + affine : 2D array-like + Affine transformation array. Usually shape (4, 4), but can be any 2D + array. + + Returns + ------- + angles : 1D array-like + The *obliquity* of each axis with respect to the cardinal axes, in radians. + """ vs = voxel_sizes(affine) - fig_merit = (affine[:-1, :-1] / vs[np.newaxis, ...]).max(axis=1).min() - radians = abs(acos(fig_merit)) - if not degrees: - return radians - return radians * 180 / PI + best_cosines = np.abs((affine[:-1, :-1] / vs).max(axis=1)) + return np.arccos(best_cosines) diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index db4fbe0630..13b554c5a8 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -182,9 +182,11 @@ def test_voxel_sizes(): def test_obliquity(): """Check the calculation of inclination of an affine axes.""" + from math import pi aligned = np.diag([2.0, 2.0, 2.3, 1.0]) aligned[:-1, -1] = [-10, -10, -7] R = from_matvec(euler2mat(x=0.09, y=0.001, z=0.001), [0.0, 0.0, 0.0]) oblique = R.dot(aligned) - assert_almost_equal(obliquity(aligned), 0.0) - assert_almost_equal(obliquity(oblique, degrees=True), 5.1569948883) + assert_almost_equal(obliquity(aligned), [0.0, 0.0, 0.0]) + assert_almost_equal(obliquity(oblique) * 180 / pi, + [0.0810285, 5.1569949, 5.1569376]) From e1d78c8e4c5eed24fabab4e9d925985abb695b0b Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Mon, 23 Sep 2019 12:30:59 -0700 Subject: [PATCH 284/689] resolve flake8 --- nibabel/filename_parser.py | 6 ++++-- nibabel/loadsave.py | 6 ++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index 8f69c4a843..f6e23e2dce 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -16,9 +16,11 @@ import pathlib + class TypesFilenamesError(Exception): pass + def _stringify_path(filepath_or_buffer): """Attempt to convert a path-like object to a string. 
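With the final form of `obliquity` above (per-axis angles in radians, no `degrees` keyword), a worked example mirroring the updated test:

    import numpy as np
    from nibabel.affines import from_matvec, obliquity
    from nibabel.eulerangles import euler2mat

    aligned = np.diag([2.0, 2.0, 2.3, 1.0])                 # plumb axes
    oblique = from_matvec(euler2mat(x=0.09), [0, 0, 0]).dot(aligned)
    obliquity(aligned)                # [0., 0., 0.]  (radians, one value per axis)
    np.degrees(obliquity(oblique))    # approx [0., 5.16, 5.16] for this 0.09 rad tilt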
@@ -41,7 +43,7 @@ def _stringify_path(filepath_or_buffer): https://github.com/pandas-dev/pandas/blob/325dd686de1589c17731cf93b649ed5ccb5a99b4/pandas/io/common.py#L131-L160 """ if hasattr(filepath_or_buffer, "__fspath__"): - return filepath_or_buffer.__fspath__() + return filepath_or_buffer.__fspath__() elif isinstance(filepath_or_buffer, pathlib.Path): return str(filepath_or_buffer) return filepath_or_buffer @@ -288,7 +290,7 @@ def splitext_addext(filename, ('fname', '.ext', '.foo') ''' filename = _stringify_path(filename) - + if match_case: endswith = _endswith else: diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 015973914c..ecb6b6cb18 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -20,8 +20,6 @@ from .deprecated import deprecate_with_version - - def load(filename, **kwargs): ''' Load file given filename, guessing at file type @@ -39,14 +37,14 @@ def load(filename, **kwargs): ''' filename = _stringify_path(filename) - #Check file exists and is not empty + # Check file exists and is not empty try: stat_result = os.stat(filename) except OSError: raise FileNotFoundError("No such file or no access: '%s'" % filename) if stat_result.st_size <= 0: raise ImageFileError("Empty file: '%s'" % filename) - + sniff = None for image_klass in all_image_classes: is_valid, sniff = image_klass.path_maybe_image(filename, sniff) From 04584b6d8e0e50928ee7c5ff09de61471d6e5acb Mon Sep 17 00:00:00 2001 From: oesteban Date: Tue, 24 Sep 2019 10:20:19 -0700 Subject: [PATCH 285/689] doc: add link to AFNI's documentation about *obliquity* [skip ci] --- nibabel/affines.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 25c9a4cdbd..b09257c7ad 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -304,7 +304,9 @@ def obliquity(affine): The term *obliquity* is defined here as the rotation of those axes with respect to the cardinal axes. This implementation is inspired by `AFNI's implementation - `_ + `_. + For further details about *obliquity*, check `AFNI's documentation + _. 
Parameters ---------- From e9b9a1081f94ed7163edf15e109663cab6925264 Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Tue, 24 Sep 2019 17:00:56 -0500 Subject: [PATCH 286/689] autopep8 dicomwrappers.py --- nibabel/nicom/dicomwrappers.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 9038c96a12..6263c8a6f1 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -80,11 +80,12 @@ def wrapper_from_data(dcm_data): return MultiframeWrapper(dcm_data) # Check for Siemens DICOM format types # Only Siemens will have data for the CSA header - try: + try: csa = csar.get_csa_header(dcm_data) except csar.CSAReadError as e: - warnings.warn('Error while attempting to read CSA header: '+ - str(e.args) + '\n Ignoring Siemens private (CSA) header info.') + warnings.warn('Error while attempting to read CSA header: ' + + str(e.args) + + '\n Ignoring Siemens private (CSA) header info.') csa = None if csa is None: return Wrapper(dcm_data) From 7c84be6845ac42dee6cb48b3e4ed6355c353f71e Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Tue, 24 Sep 2019 17:11:16 -0500 Subject: [PATCH 287/689] fix pep8 problems --- nibabel/nicom/csareader.py | 3 ++- nibabel/nicom/dicomwrappers.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index 9efc5fe12f..86c51302e3 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -100,7 +100,8 @@ def read(csa_str): csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I') if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS: raise CSAReadError('Number of tags `t` should be ' - '0 < t <= %d. Instead found %d tags.' % (MAX_CSA_ITEMS, csa_dict['n_tags'])) + '0 < t <= %d. Instead found %d tags.' + % (MAX_CSA_ITEMS, csa_dict['n_tags'])) for tag_no in range(csa_dict['n_tags']): name, vm, vr, syngodt, n_items, last3 = \ up_str.unpack('64si4s3i') diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 6263c8a6f1..f37d0323a8 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -84,7 +84,7 @@ def wrapper_from_data(dcm_data): csa = csar.get_csa_header(dcm_data) except csar.CSAReadError as e: warnings.warn('Error while attempting to read CSA header: ' + - str(e.args) + + str(e.args) + '\n Ignoring Siemens private (CSA) header info.') csa = None if csa is None: From 3efe193a29709444184c265b1575f4216e5cb1a0 Mon Sep 17 00:00:00 2001 From: Henry Braun Date: Tue, 24 Sep 2019 17:14:08 -0500 Subject: [PATCH 288/689] fix pep8 problems --- nibabel/nicom/csareader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index 86c51302e3..1764e2878c 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -100,7 +100,7 @@ def read(csa_str): csa_dict['n_tags'], csa_dict['check'] = up_str.unpack('2I') if not 0 < csa_dict['n_tags'] <= MAX_CSA_ITEMS: raise CSAReadError('Number of tags `t` should be ' - '0 < t <= %d. Instead found %d tags.' + '0 < t <= %d. Instead found %d tags.' 
% (MAX_CSA_ITEMS, csa_dict['n_tags'])) for tag_no in range(csa_dict['n_tags']): name, vm, vr, syngodt, n_items, last3 = \ From 891d85c07b41f4908fb3e9ffb9e89c78883a64eb Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 27 Sep 2019 08:52:01 -0700 Subject: [PATCH 289/689] fix: incorrect operation ordering caught by @matthew-brett --- nibabel/affines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index b09257c7ad..9a37cc9e49 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -321,5 +321,5 @@ def obliquity(affine): """ vs = voxel_sizes(affine) - best_cosines = np.abs((affine[:-1, :-1] / vs).max(axis=1)) + best_cosines = np.abs(affine[:-1, :-1] / vs).max(axis=1) return np.arccos(best_cosines) From 501cc310cdfbb6e9a74ec16997e91da99feefbca Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 09:35:27 -0400 Subject: [PATCH 290/689] Re-import externals/netcdf.py from scipy --- nibabel/externals/netcdf.py | 404 +++++++++++++++++++++++++++--------- 1 file changed, 303 insertions(+), 101 deletions(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index e485533cd7..b4e7512697 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -10,13 +10,17 @@ NetCDF files. The same API is also used in the PyNIO and pynetcdf modules, allowing these modules to be used interchangeably when working with NetCDF files. + +Only NetCDF3 is supported here; for NetCDF4 see +`netCDF4-python `__, +which has a similar API. + """ from __future__ import division, print_function, absolute_import # TODO: # * properly implement ``_FillValue``. -# * implement Jeff Whitaker's patch for masked variables. # * fix character variables. # * implement PAGESIZE for Python 2.6? @@ -29,20 +33,26 @@ # otherwise the key would be inserted into userspace attributes. 
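Why that one-line ordering fix matters: with a flipped axis (negative direction cosine), taking the signed maximum before the absolute value picks the wrong cosine. An illustrative row of normalized cosines:

    import numpy as np
    row = np.array([-0.99, 0.10, 0.05])  # nearly-plumb axis, flipped sign
    np.abs(row.max())    # 0.10 -> arccos ~ 84 deg: wildly wrong
    np.abs(row).max()    # 0.99 -> arccos ~ 8 deg: the intended best cosine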
-__all__ = ['netcdf_file'] +__all__ = ['netcdf_file', 'netcdf_variable'] +import sys +import warnings +import weakref from operator import mul -from mmap import mmap, ACCESS_READ +from collections import OrderedDict -import numpy as np # noqa -from ..py3k import asbytes, asstr -from numpy import frombuffer, ndarray, dtype, empty, array, asarray +import mmap as mm + +import numpy as np +from numpy.compat import asbytes, asstr +from numpy import frombuffer, dtype, empty, array, asarray from numpy import little_endian as LITTLE_ENDIAN from functools import reduce -from six import integer_types +from six import integer_types, text_type, binary_type +IS_PYPY = ('__pypy__' in sys.modules) ABSENT = b'\x00\x00\x00\x00\x00\x00\x00\x00' ZERO = b'\x00\x00\x00\x00' @@ -55,27 +65,39 @@ NC_DIMENSION = b'\x00\x00\x00\n' NC_VARIABLE = b'\x00\x00\x00\x0b' NC_ATTRIBUTE = b'\x00\x00\x00\x0c' - +FILL_BYTE = b'\x81' +FILL_CHAR = b'\x00' +FILL_SHORT = b'\x80\x01' +FILL_INT = b'\x80\x00\x00\x01' +FILL_FLOAT = b'\x7C\xF0\x00\x00' +FILL_DOUBLE = b'\x47\x9E\x00\x00\x00\x00\x00\x00' TYPEMAP = {NC_BYTE: ('b', 1), - NC_CHAR: ('c', 1), - NC_SHORT: ('h', 2), - NC_INT: ('i', 4), - NC_FLOAT: ('f', 4), - NC_DOUBLE: ('d', 8)} + NC_CHAR: ('c', 1), + NC_SHORT: ('h', 2), + NC_INT: ('i', 4), + NC_FLOAT: ('f', 4), + NC_DOUBLE: ('d', 8)} + +FILLMAP = {NC_BYTE: FILL_BYTE, + NC_CHAR: FILL_CHAR, + NC_SHORT: FILL_SHORT, + NC_INT: FILL_INT, + NC_FLOAT: FILL_FLOAT, + NC_DOUBLE: FILL_DOUBLE} REVERSE = {('b', 1): NC_BYTE, - ('B', 1): NC_CHAR, - ('c', 1): NC_CHAR, - ('h', 2): NC_SHORT, - ('i', 4): NC_INT, - ('f', 4): NC_FLOAT, - ('d', 8): NC_DOUBLE, + ('B', 1): NC_CHAR, + ('c', 1): NC_CHAR, + ('h', 2): NC_SHORT, + ('i', 4): NC_INT, + ('f', 4): NC_FLOAT, + ('d', 8): NC_DOUBLE, - # these come from asarray(1).dtype.char and asarray('foo').dtype.char, - # used when getting the types from generic attributes. - ('l', 4): NC_INT, - ('S', 1): NC_CHAR} + # these come from asarray(1).dtype.char and asarray('foo').dtype.char, + # used when getting the types from generic attributes. + ('l', 4): NC_INT, + ('S', 1): NC_CHAR} class netcdf_file(object): @@ -96,17 +118,22 @@ class netcdf_file(object): ---------- filename : string or file-like string -> filename - mode : {'r', 'w'}, optional - read-write mode, default is 'r' + mode : {'r', 'w', 'a'}, optional + read-write-append mode, default is 'r' mmap : None or bool, optional Whether to mmap `filename` when reading. Default is True when `filename` is a file name, False when `filename` is a - file-like object + file-like object. Note that when mmap is in use, data arrays + returned refer directly to the mmapped data on disk, and the + file cannot be closed as long as references to it exist. version : {1, 2}, optional version of netcdf to read / write, where 1 means *Classic format* and 2 means *64-bit offset format*. Default is 1. See - `here `_ + `here `__ for more info. + maskandscale : bool, optional + Whether to automatically scale and/or mask data based on attributes. + Default is False. Notes ----- @@ -117,7 +144,7 @@ class netcdf_file(object): NetCDF files are a self-describing binary data format. The file contains metadata that describes the dimensions and variables in the file. More details about NetCDF files can be found `here - `_. There + `__. There are three main sections to a NetCDF data structure: 1. Dimensions @@ -145,6 +172,13 @@ class netcdf_file(object): unnecessary data into memory. It uses the ``mmap`` module to create Numpy arrays mapped to the data on disk, for the same purpose. 
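As a quick check on the FILL_* byte strings introduced above, they decode (big-endian) to NetCDF's default fill values, e.g.:

    import numpy as np
    np.frombuffer(b'\x7C\xF0\x00\x00', dtype='>f4')[0]                  # 9.96921e+36, default float fill
    np.frombuffer(b'\x47\x9E\x00\x00\x00\x00\x00\x00', dtype='>f8')[0]  # 9.969209968386869e+36, default double fill
    np.frombuffer(b'\x80\x00\x00\x01', dtype='>i4')[0]                  # -2147483647, default int fill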
+ Note that when `netcdf_file` is used to open a file with mmap=True + (default for read-only), arrays returned by it refer to data + directly on the disk. The file should not be closed, and cannot be cleanly + closed when asked, if such arrays are alive. You may want to copy data arrays + obtained from mmapped Netcdf file if they are to be processed after the file + is closed, see the example below. + Examples -------- To create a NetCDF file: @@ -166,9 +200,9 @@ class netcdf_file(object): >>> time.units = 'days since 2008-01-01' >>> f.close() - Note the assignment of ``range(10)`` to ``time[:]``. Exposing the slice + Note the assignment of ``arange(10)`` to ``time[:]``. Exposing the slice of the time variable allows for the data to be set in the object, rather - than letting ``range(10)`` overwrite the ``time`` variable. + than letting ``arange(10)`` overwrite the ``time`` variable. To read the NetCDF file we just created: @@ -182,7 +216,21 @@ class netcdf_file(object): True >>> time[-1] 9 + + NetCDF files, when opened read-only, return arrays that refer + directly to memory-mapped data on disk: + + >>> data = time[:] + >>> data.base.base + + + If the data is to be processed after the file is closed, it needs + to be copied to main memory: + + >>> data = time[:].copy() >>> f.close() + >>> data.mean() + 4.5 A NetCDF file can also be used as context manager: @@ -196,8 +244,12 @@ class netcdf_file(object): >>> os.unlink(fname) >>> os.rmdir(tmp_pth) """ - def __init__(self, filename, mode='r', mmap=None, version=1): + def __init__(self, filename, mode='r', mmap=None, version=1, + maskandscale=False): """Initialize netcdf_file from fileobj (str or file-like).""" + if mode not in 'rwa': + raise ValueError("Mode must be either 'r', 'w' or 'a'.") + if hasattr(filename, 'seek'): # file-like self.fp = filename self.filename = 'None' @@ -207,34 +259,39 @@ def __init__(self, filename, mode='r', mmap=None, version=1): raise ValueError('Cannot use file object for mmap') else: # maybe it's a string self.filename = filename - self.fp = open(self.filename, '%sb' % mode) + omode = 'r+' if mode == 'a' else mode + self.fp = open(self.filename, '%sb' % omode) if mmap is None: - mmap = True - try: - self.fp.seek(0, 2) - except ValueError: - self.file_bytes = -1 # Unknown file length (gzip). - else: - self.file_bytes = self.fp.tell() - self.fp.seek(0) + # Mmapped files on PyPy cannot be usually closed + # before the GC runs, so it's better to use mmap=False + # as the default. 
+ mmap = (not IS_PYPY) - self.use_mmap = mmap - self.version_byte = version + if mode != 'r': + # Cannot read write-only files + mmap = False - if not mode in 'rw': - raise ValueError("Mode must be either 'r' or 'w'.") + self.use_mmap = mmap self.mode = mode + self.version_byte = version + self.maskandscale = maskandscale - self.dimensions = {} - self.variables = {} + self.dimensions = OrderedDict() + self.variables = OrderedDict() self._dims = [] self._recs = 0 self._recsize = 0 - self._attributes = {} + self._mm = None + self._mm_buf = None + if self.use_mmap: + self._mm = mm.mmap(self.fp.fileno(), 0, access=mm.ACCESS_READ) + self._mm_buf = np.frombuffer(self._mm, dtype=np.int8) - if mode == 'r': + self._attributes = OrderedDict() + + if mode in 'ra': self._read() def __setattr__(self, attr, value): @@ -248,10 +305,28 @@ def __setattr__(self, attr, value): def close(self): """Closes the NetCDF file.""" - if not self.fp.closed: + if hasattr(self, 'fp') and not self.fp.closed: try: self.flush() finally: + self.variables = OrderedDict() + if self._mm_buf is not None: + ref = weakref.ref(self._mm_buf) + self._mm_buf = None + if ref() is None: + # self._mm_buf is gc'd, and we can close the mmap + self._mm.close() + else: + # we cannot close self._mm, since self._mm_buf is + # alive and there may still be arrays referring to it + warnings.warn(( + "Cannot close a netcdf_file opened with mmap=True, when " + "netcdf_variables or arrays referring to its data still exist. " + "All data arrays obtained from such files refer directly to " + "data on disk, and must be copied before the file can be cleanly " + "closed. (See netcdf_file docstring for more information on mmap.)" + ), category=RuntimeWarning) + self._mm = None self.fp.close() __del__ = close @@ -281,6 +356,9 @@ def createDimension(self, name, length): createVariable """ + if length is None and self._dims: + raise ValueError("Only first dimension may be unlimited!") + self.dimensions[name] = length self._dims.append(name) @@ -324,7 +402,9 @@ def createVariable(self, name, type, dimensions): raise ValueError("NetCDF 3 does not support type %s" % type) data = empty(shape_, dtype=type.newbyteorder("B")) # convert to big endian always for NetCDF 3 - self.variables[name] = netcdf_variable(data, typecode, size, shape, dimensions) + self.variables[name] = netcdf_variable( + data, typecode, size, shape, dimensions, + maskandscale=self.maskandscale) return self.variables[name] def flush(self): @@ -336,7 +416,7 @@ def flush(self): sync : Identical function """ - if hasattr(self, 'mode') and self.mode is 'w': + if hasattr(self, 'mode') and self.mode in 'wa': self._write() sync = flush @@ -378,7 +458,7 @@ def _write_att_array(self, attributes): self._pack_int(len(attributes)) for name, values in attributes.items(): self._pack_string(name) - self._write_values(values) + self._write_att_values(values) else: self.fp.write(ABSENT) @@ -387,11 +467,13 @@ def _write_var_array(self): self.fp.write(NC_VARIABLE) self._pack_int(len(self.variables)) - # Sort variables non-recs first, then recs. We use a DSU - # since some people use pupynere with Python 2.3.x. - deco = [(v._shape and not v.isrec, k) for (k, v) in self.variables.items()] - deco.sort() - variables = [k for (unused, k) in deco][::-1] + # Sort variable names non-recs first, then recs. + def sortkey(n): + v = self.variables[n] + if v.isrec: + return (-1,) + return v._shape + variables = sorted(self.variables, key=sortkey, reverse=True) # Set the metadata for all variables. 
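A usage note on the new guard in createDimension above: the unlimited (record) dimension must now be created before any fixed dimension. A sketch with illustrative file names:

    from nibabel.externals.netcdf import netcdf_file

    f = netcdf_file('scratch.nc', 'w')
    f.createDimension('time', None)   # unlimited dimension, created first: OK
    f.createDimension('x', 4)
    f.close()

    g = netcdf_file('scratch2.nc', 'w')
    g.createDimension('x', 4)
    g.createDimension('time', None)   # ValueError: "Only first dimension may be unlimited!"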
for name in variables: @@ -429,8 +511,8 @@ def _write_var_metadata(self, name): vsize = var.data[0].size * var.data.itemsize except IndexError: vsize = 0 - rec_vars = len([var for var in self.variables.values() - if var.isrec]) + rec_vars = len([v for v in self.variables.values() + if v.isrec]) if rec_vars > 1: vsize += -vsize % 4 self.variables[name].__dict__['_vsize'] = vsize @@ -453,12 +535,17 @@ def _write_var_data(self, name): if not var.isrec: self.fp.write(var.data.tostring()) count = var.data.size * var.data.itemsize - self.fp.write(b'0' * (var._vsize - count)) + self._write_var_padding(var, var._vsize - count) else: # record variable # Handle rec vars with shape[0] < nrecs. if self._recs > len(var.data): shape = (self._recs,) + var.data.shape[1:] - var.data.resize(shape) + # Resize in-place does not always work since + # the array might not be single-segment + try: + var.data.resize(shape) + except ValueError: + var.__dict__['data'] = np.resize(var.data, shape).astype(var.data.dtype) pos0 = pos = self.fp.tell() for rec in var.data: @@ -471,30 +558,42 @@ def _write_var_data(self, name): self.fp.write(rec.tostring()) # Padding count = rec.size * rec.itemsize - self.fp.write(b'0' * (var._vsize - count)) + self._write_var_padding(var, var._vsize - count) pos += self._recsize self.fp.seek(pos) self.fp.seek(pos0 + var._vsize) - def _write_values(self, values): + def _write_var_padding(self, var, size): + encoded_fill_value = var._get_encoded_fill_value() + num_fills = size // len(encoded_fill_value) + self.fp.write(encoded_fill_value * num_fills) + + def _write_att_values(self, values): if hasattr(values, 'dtype'): nc_type = REVERSE[values.dtype.char, values.dtype.itemsize] else: types = [(t, NC_INT) for t in integer_types] types += [ (float, NC_FLOAT), - (str, NC_CHAR), + (str, NC_CHAR) ] - try: - sample = values[0] - except TypeError: + # bytes index into scalars in py3k. Check for "string" types + if isinstance(values, text_type) or isinstance(values, binary_type): sample = values + else: + try: + sample = values[0] # subscriptable? + except TypeError: + sample = values # scalar + for class_, nc_type in types: if isinstance(sample, class_): break typecode, size = TYPEMAP[nc_type] dtype_ = '>%s' % typecode + # asarray() dies with bytes and '>c' in py3k. 
Change to 'S' + dtype_ = 'S' if dtype_ == '>c' else dtype_ values = asarray(values, dtype=dtype_) @@ -511,7 +610,7 @@ def _write_values(self, values): values = values.byteswap() self.fp.write(values.tostring()) count = values.size * values.itemsize - self.fp.write(b'0' * (-count % 4)) # pad + self.fp.write(b'\x00' * (-count % 4)) # pad def _read(self): # Check magic bytes and version @@ -532,7 +631,7 @@ def _read_numrecs(self): def _read_dim_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_DIMENSION]: + if header not in [ZERO, NC_DIMENSION]: raise ValueError("Unexpected header.") count = self._unpack_int() @@ -548,19 +647,19 @@ def _read_gatt_array(self): def _read_att_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_ATTRIBUTE]: + if header not in [ZERO, NC_ATTRIBUTE]: raise ValueError("Unexpected header.") count = self._unpack_int() - attributes = {} + attributes = OrderedDict() for attr in range(count): name = asstr(self._unpack_string()) - attributes[name] = self._read_values() + attributes[name] = self._read_att_values() return attributes def _read_var_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_VARIABLE]: + if header not in [ZERO, NC_VARIABLE]: raise ValueError("Unexpected header.") begin = 0 @@ -570,7 +669,7 @@ def _read_var_array(self): for var in range(count): (name, dimensions, shape, attributes, typecode, size, dtype_, begin_, vsize) = self._read_var() - # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html + # https://www.unidata.ucar.edu/software/netcdf/docs/user_guide.html # Note that vsize is the product of the dimension lengths # (omitting the record dimension) and the number of bytes # per value (determined from the type), increased to the @@ -607,28 +706,21 @@ def _read_var_array(self): else: # not a record variable # Calculate size to avoid problems with vsize (above) a_size = reduce(mul, shape, 1) * size - if self.file_bytes >= 0 and begin_ + a_size > self.file_bytes: - data = frombuffer(b'\x00'*a_size, dtype=dtype_) - elif self.use_mmap: - mm = mmap(self.fp.fileno(), begin_+a_size, access=ACCESS_READ) - data = ndarray.__new__(ndarray, shape, dtype=dtype_, - buffer=mm, offset=begin_, order=0) + if self.use_mmap: + data = self._mm_buf[begin_:begin_+a_size].view(dtype=dtype_) + data.shape = shape else: pos = self.fp.tell() self.fp.seek(begin_) - # Try to read file, which may fail because the data is - # at or past the end of file. In that case, we treat - # this data as zeros. - buf = self.fp.read(a_size) - if len(buf) < a_size: - buf = b'\x00'*a_size - data = frombuffer(buf, dtype=dtype_) + data = frombuffer(self.fp.read(a_size), dtype=dtype_ + ).copy() data.shape = shape self.fp.seek(pos) # Add variable. self.variables[name] = netcdf_variable( - data, typecode, size, shape, dimensions, attributes) + data, typecode, size, shape, dimensions, attributes, + maskandscale=self.maskandscale) if rec_vars: # Remove padding when only one record variable. @@ -638,13 +730,13 @@ def _read_var_array(self): # Build rec array. 
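The `.copy()` now appended to the `frombuffer` reads is load-bearing: `frombuffer` over an immutable bytes object returns a read-only view, so without the copy the variable data could not be modified in place. A minimal demonstration:

    import numpy as np
    arr = np.frombuffer(b'\x00' * 8, dtype='>f4')
    arr.flags.writeable           # False: view over immutable bytes
    arr.copy().flags.writeable    # True: hence frombuffer(...).copy() above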
if self.use_mmap: - mm = mmap(self.fp.fileno(), begin+self._recs*self._recsize, access=ACCESS_READ) - rec_array = ndarray.__new__(ndarray, (self._recs,), dtype=dtypes, - buffer=mm, offset=begin, order=0) + rec_array = self._mm_buf[begin:begin+self._recs*self._recsize].view(dtype=dtypes) + rec_array.shape = (self._recs,) else: pos = self.fp.tell() self.fp.seek(begin) - rec_array = frombuffer(self.fp.read(self._recs*self._recsize), dtype=dtypes) + rec_array = frombuffer(self.fp.read(self._recs*self._recsize), + dtype=dtypes).copy() rec_array.shape = (self._recs,) self.fp.seek(pos) @@ -676,7 +768,7 @@ def _read_var(self): return name, dimensions, shape, attributes, typecode, size, dtype_, begin, vsize - def _read_values(self): + def _read_att_values(self): nc_type = self.fp.read(4) n = self._unpack_int() @@ -687,7 +779,7 @@ def _read_values(self): self.fp.read(-count % 4) # read padding if typecode is not 'c': - values = frombuffer(values, dtype='>%s' % typecode) + values = frombuffer(values, dtype='>%s' % typecode).copy() if values.shape == (1,): values = values[0] else: @@ -718,7 +810,7 @@ def _pack_string(self, s): count = len(s) self._pack_int(count) self.fp.write(asbytes(s)) - self.fp.write(b'0' * (-count % 4)) # pad + self.fp.write(b'\x00' * (-count % 4)) # pad def _unpack_string(self): count = self._unpack_int() @@ -729,7 +821,7 @@ def _unpack_string(self): class netcdf_variable(object): """ - A data object for the `netcdf` module. + A data object for netcdf files. `netcdf_variable` objects are constructed by calling the method `netcdf_file.createVariable` on the `netcdf_file` object. `netcdf_variable` @@ -763,6 +855,9 @@ class netcdf_variable(object): attributes : dict, optional Attribute values (any type) keyed by string names. These attributes become attributes for the netcdf_variable object. + maskandscale : bool, optional + Whether to automatically scale and/or mask data based on attributes. + Default is False. Attributes @@ -777,14 +872,17 @@ class netcdf_variable(object): isrec, shape """ - def __init__(self, data, typecode, size, shape, dimensions, attributes=None): + def __init__(self, data, typecode, size, shape, dimensions, + attributes=None, + maskandscale=False): self.data = data self._typecode = typecode self._size = size self._shape = shape self.dimensions = dimensions + self.maskandscale = maskandscale - self._attributes = attributes or {} + self._attributes = attributes or OrderedDict() for k, v in self._attributes.items(): self.__dict__[k] = v @@ -806,7 +904,7 @@ def isrec(self): `netcdf_variable`. 
""" - return self.data.shape and not self._shape[0] + return bool(self.data.shape) and not self._shape[0] isrec = property(isrec) def shape(self): @@ -883,9 +981,36 @@ def itemsize(self): return self._size def __getitem__(self, index): - return self.data[index] + if not self.maskandscale: + return self.data[index] + + data = self.data[index].copy() + missing_value = self._get_missing_value() + data = self._apply_missing_value(data, missing_value) + scale_factor = self._attributes.get('scale_factor') + add_offset = self._attributes.get('add_offset') + if add_offset is not None or scale_factor is not None: + data = data.astype(np.float64) + if scale_factor is not None: + data = data * scale_factor + if add_offset is not None: + data += add_offset + + return data def __setitem__(self, index, data): + if self.maskandscale: + missing_value = ( + self._get_missing_value() or + getattr(data, 'fill_value', 999999)) + self._attributes.setdefault('missing_value', missing_value) + self._attributes.setdefault('_FillValue', missing_value) + data = ((data - self._attributes.get('add_offset', 0.0)) / + self._attributes.get('scale_factor', 1.0)) + data = np.ma.asarray(data).filled(missing_value) + if self._typecode not in 'fd' and data.dtype.kind == 'f': + data = np.round(data) + # Expand data for record vars? if self.isrec: if isinstance(index, tuple): @@ -898,9 +1023,86 @@ def __setitem__(self, index, data): recs = rec_index + 1 if recs > len(self.data): shape = (recs,) + self._shape[1:] - self.data.resize(shape) + # Resize in-place does not always work since + # the array might not be single-segment + try: + self.data.resize(shape) + except ValueError: + self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) self.data[index] = data + def _default_encoded_fill_value(self): + """ + The default encoded fill-value for this Variable's data type. + """ + nc_type = REVERSE[self.typecode(), self.itemsize()] + return FILLMAP[nc_type] + + def _get_encoded_fill_value(self): + """ + Returns the encoded fill value for this variable as bytes. + + This is taken from either the _FillValue attribute, or the default fill + value for this variable's data type. + """ + if '_FillValue' in self._attributes: + fill_value = np.array(self._attributes['_FillValue'], + dtype=self.data.dtype).tostring() + if len(fill_value) == self.itemsize(): + return fill_value + else: + return self._default_encoded_fill_value() + else: + return self._default_encoded_fill_value() + + def _get_missing_value(self): + """ + Returns the value denoting "no data" for this variable. + + If this variable does not have a missing/fill value, returns None. + + If both _FillValue and missing_value are given, give precedence to + _FillValue. The netCDF standard gives special meaning to _FillValue; + missing_value is just used for compatibility with old datasets. + """ + + if '_FillValue' in self._attributes: + missing_value = self._attributes['_FillValue'] + elif 'missing_value' in self._attributes: + missing_value = self._attributes['missing_value'] + else: + missing_value = None + + return missing_value + + @staticmethod + def _apply_missing_value(data, missing_value): + """ + Applies the given missing value to the data array. + + Returns a numpy.ma array, with any value equal to missing_value masked + out (unless missing_value is None, in which case the original array is + returned). 
+ """ + + if missing_value is None: + newdata = data + else: + try: + missing_value_isnan = np.isnan(missing_value) + except (TypeError, NotImplementedError): + # some data types (e.g., characters) cannot be tested for NaN + missing_value_isnan = False + + if missing_value_isnan: + mymask = np.isnan(data) + else: + mymask = (data == missing_value) + + newdata = np.ma.masked_where(mymask, data) + + return newdata + NetCDFFile = netcdf_file NetCDFVariable = netcdf_variable From 0b79a03c7354a5c7f0c90fd9c05f392b6ff6898a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 09:55:58 -0400 Subject: [PATCH 291/689] DOCTEST: Avoid fixed ID in doctest --- nibabel/externals/netcdf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index b4e7512697..c464334ff3 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -221,8 +221,8 @@ class netcdf_file(object): directly to memory-mapped data on disk: >>> data = time[:] - >>> data.base.base - + >>> data.base.base # doctest: +ELLIPSIS + If the data is to be processed after the file is closed, it needs to be copied to main memory: From e15a0cadea1b2192547a7c4d909fe69bc62c89cc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 11:19:06 -0400 Subject: [PATCH 292/689] DOCTEST: Delete reference to mmap data to avoid warning --- nibabel/externals/netcdf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index c464334ff3..3fcb4cd55a 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -228,6 +228,7 @@ class netcdf_file(object): to be copied to main memory: >>> data = time[:].copy() + >>> del time # References to mmap'd objects can delay full closure >>> f.close() >>> data.mean() 4.5 @@ -240,7 +241,7 @@ class netcdf_file(object): Delete our temporary directory and file: - >>> del f, time # needed for windows unlink + >>> del f # needed for windows unlink >>> os.unlink(fname) >>> os.rmdir(tmp_pth) """ From cfd1a2464e37402d2ac68d87dc23a2f2d03b716c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 09:35:27 -0400 Subject: [PATCH 293/689] Re-import externals/netcdf.py from scipy --- nibabel/externals/netcdf.py | 402 +++++++++++++++++++++++++++--------- 1 file changed, 302 insertions(+), 100 deletions(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index aca62aac80..6ee202ae78 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -10,12 +10,16 @@ NetCDF files. The same API is also used in the PyNIO and pynetcdf modules, allowing these modules to be used interchangeably when working with NetCDF files. + +Only NetCDF3 is supported here; for NetCDF4 see +`netCDF4-python `__, +which has a similar API. + """ # TODO: # * properly implement ``_FillValue``. -# * implement Jeff Whitaker's patch for masked variables. # * fix character variables. # * implement PAGESIZE for Python 2.6? @@ -28,18 +32,24 @@ # otherwise the key would be inserted into userspace attributes. 
-__all__ = ['netcdf_file'] +__all__ = ['netcdf_file', 'netcdf_variable'] +import sys +import warnings +import weakref from operator import mul -from mmap import mmap, ACCESS_READ +from collections import OrderedDict -import numpy as np # noqa -from numpy.compat.py3k import asbytes, asstr -from numpy import frombuffer, ndarray, dtype, empty, array, asarray +import mmap as mm + +import numpy as np +from numpy.compat import asbytes, asstr +from numpy import frombuffer, dtype, empty, array, asarray from numpy import little_endian as LITTLE_ENDIAN from functools import reduce +IS_PYPY = ('__pypy__' in sys.modules) ABSENT = b'\x00\x00\x00\x00\x00\x00\x00\x00' ZERO = b'\x00\x00\x00\x00' @@ -52,27 +62,39 @@ NC_DIMENSION = b'\x00\x00\x00\n' NC_VARIABLE = b'\x00\x00\x00\x0b' NC_ATTRIBUTE = b'\x00\x00\x00\x0c' - +FILL_BYTE = b'\x81' +FILL_CHAR = b'\x00' +FILL_SHORT = b'\x80\x01' +FILL_INT = b'\x80\x00\x00\x01' +FILL_FLOAT = b'\x7C\xF0\x00\x00' +FILL_DOUBLE = b'\x47\x9E\x00\x00\x00\x00\x00\x00' TYPEMAP = {NC_BYTE: ('b', 1), - NC_CHAR: ('c', 1), - NC_SHORT: ('h', 2), - NC_INT: ('i', 4), - NC_FLOAT: ('f', 4), - NC_DOUBLE: ('d', 8)} + NC_CHAR: ('c', 1), + NC_SHORT: ('h', 2), + NC_INT: ('i', 4), + NC_FLOAT: ('f', 4), + NC_DOUBLE: ('d', 8)} + +FILLMAP = {NC_BYTE: FILL_BYTE, + NC_CHAR: FILL_CHAR, + NC_SHORT: FILL_SHORT, + NC_INT: FILL_INT, + NC_FLOAT: FILL_FLOAT, + NC_DOUBLE: FILL_DOUBLE} REVERSE = {('b', 1): NC_BYTE, - ('B', 1): NC_CHAR, - ('c', 1): NC_CHAR, - ('h', 2): NC_SHORT, - ('i', 4): NC_INT, - ('f', 4): NC_FLOAT, - ('d', 8): NC_DOUBLE, + ('B', 1): NC_CHAR, + ('c', 1): NC_CHAR, + ('h', 2): NC_SHORT, + ('i', 4): NC_INT, + ('f', 4): NC_FLOAT, + ('d', 8): NC_DOUBLE, - # these come from asarray(1).dtype.char and asarray('foo').dtype.char, - # used when getting the types from generic attributes. - ('l', 4): NC_INT, - ('S', 1): NC_CHAR} + # these come from asarray(1).dtype.char and asarray('foo').dtype.char, + # used when getting the types from generic attributes. + ('l', 4): NC_INT, + ('S', 1): NC_CHAR} class netcdf_file(object): @@ -93,17 +115,22 @@ class netcdf_file(object): ---------- filename : string or file-like string -> filename - mode : {'r', 'w'}, optional - read-write mode, default is 'r' + mode : {'r', 'w', 'a'}, optional + read-write-append mode, default is 'r' mmap : None or bool, optional Whether to mmap `filename` when reading. Default is True when `filename` is a file name, False when `filename` is a - file-like object + file-like object. Note that when mmap is in use, data arrays + returned refer directly to the mmapped data on disk, and the + file cannot be closed as long as references to it exist. version : {1, 2}, optional version of netcdf to read / write, where 1 means *Classic format* and 2 means *64-bit offset format*. Default is 1. See - `here `_ + `here `__ for more info. + maskandscale : bool, optional + Whether to automatically scale and/or mask data based on attributes. + Default is False. Notes ----- @@ -114,7 +141,7 @@ class netcdf_file(object): NetCDF files are a self-describing binary data format. The file contains metadata that describes the dimensions and variables in the file. More details about NetCDF files can be found `here - `_. There + `__. There are three main sections to a NetCDF data structure: 1. Dimensions @@ -142,6 +169,13 @@ class netcdf_file(object): unnecessary data into memory. It uses the ``mmap`` module to create Numpy arrays mapped to the data on disk, for the same purpose. 
+ Note that when `netcdf_file` is used to open a file with mmap=True + (default for read-only), arrays returned by it refer to data + directly on the disk. The file should not be closed, and cannot be cleanly + closed when asked, if such arrays are alive. You may want to copy data arrays + obtained from mmapped Netcdf file if they are to be processed after the file + is closed, see the example below. + Examples -------- To create a NetCDF file: @@ -163,9 +197,9 @@ class netcdf_file(object): >>> time.units = 'days since 2008-01-01' >>> f.close() - Note the assignment of ``range(10)`` to ``time[:]``. Exposing the slice + Note the assignment of ``arange(10)`` to ``time[:]``. Exposing the slice of the time variable allows for the data to be set in the object, rather - than letting ``range(10)`` overwrite the ``time`` variable. + than letting ``arange(10)`` overwrite the ``time`` variable. To read the NetCDF file we just created: @@ -179,7 +213,21 @@ class netcdf_file(object): True >>> time[-1] 9 + + NetCDF files, when opened read-only, return arrays that refer + directly to memory-mapped data on disk: + + >>> data = time[:] + >>> data.base.base + + + If the data is to be processed after the file is closed, it needs + to be copied to main memory: + + >>> data = time[:].copy() >>> f.close() + >>> data.mean() + 4.5 A NetCDF file can also be used as context manager: @@ -193,8 +241,12 @@ class netcdf_file(object): >>> os.unlink(fname) >>> os.rmdir(tmp_pth) """ - def __init__(self, filename, mode='r', mmap=None, version=1): + def __init__(self, filename, mode='r', mmap=None, version=1, + maskandscale=False): """Initialize netcdf_file from fileobj (str or file-like).""" + if mode not in 'rwa': + raise ValueError("Mode must be either 'r', 'w' or 'a'.") + if hasattr(filename, 'seek'): # file-like self.fp = filename self.filename = 'None' @@ -204,34 +256,39 @@ def __init__(self, filename, mode='r', mmap=None, version=1): raise ValueError('Cannot use file object for mmap') else: # maybe it's a string self.filename = filename - self.fp = open(self.filename, '%sb' % mode) + omode = 'r+' if mode == 'a' else mode + self.fp = open(self.filename, '%sb' % omode) if mmap is None: - mmap = True - try: - self.fp.seek(0, 2) - except ValueError: - self.file_bytes = -1 # Unknown file length (gzip). - else: - self.file_bytes = self.fp.tell() - self.fp.seek(0) + # Mmapped files on PyPy cannot be usually closed + # before the GC runs, so it's better to use mmap=False + # as the default. 
+ mmap = (not IS_PYPY) - self.use_mmap = mmap - self.version_byte = version + if mode != 'r': + # Cannot read write-only files + mmap = False - if not mode in 'rw': - raise ValueError("Mode must be either 'r' or 'w'.") + self.use_mmap = mmap self.mode = mode + self.version_byte = version + self.maskandscale = maskandscale - self.dimensions = {} - self.variables = {} + self.dimensions = OrderedDict() + self.variables = OrderedDict() self._dims = [] self._recs = 0 self._recsize = 0 - self._attributes = {} + self._mm = None + self._mm_buf = None + if self.use_mmap: + self._mm = mm.mmap(self.fp.fileno(), 0, access=mm.ACCESS_READ) + self._mm_buf = np.frombuffer(self._mm, dtype=np.int8) - if mode == 'r': + self._attributes = OrderedDict() + + if mode in 'ra': self._read() def __setattr__(self, attr, value): @@ -245,10 +302,28 @@ def __setattr__(self, attr, value): def close(self): """Closes the NetCDF file.""" - if not self.fp.closed: + if hasattr(self, 'fp') and not self.fp.closed: try: self.flush() finally: + self.variables = OrderedDict() + if self._mm_buf is not None: + ref = weakref.ref(self._mm_buf) + self._mm_buf = None + if ref() is None: + # self._mm_buf is gc'd, and we can close the mmap + self._mm.close() + else: + # we cannot close self._mm, since self._mm_buf is + # alive and there may still be arrays referring to it + warnings.warn(( + "Cannot close a netcdf_file opened with mmap=True, when " + "netcdf_variables or arrays referring to its data still exist. " + "All data arrays obtained from such files refer directly to " + "data on disk, and must be copied before the file can be cleanly " + "closed. (See netcdf_file docstring for more information on mmap.)" + ), category=RuntimeWarning) + self._mm = None self.fp.close() __del__ = close @@ -278,6 +353,9 @@ def createDimension(self, name, length): createVariable """ + if length is None and self._dims: + raise ValueError("Only first dimension may be unlimited!") + self.dimensions[name] = length self._dims.append(name) @@ -321,7 +399,9 @@ def createVariable(self, name, type, dimensions): raise ValueError("NetCDF 3 does not support type %s" % type) data = empty(shape_, dtype=type.newbyteorder("B")) # convert to big endian always for NetCDF 3 - self.variables[name] = netcdf_variable(data, typecode, size, shape, dimensions) + self.variables[name] = netcdf_variable( + data, typecode, size, shape, dimensions, + maskandscale=self.maskandscale) return self.variables[name] def flush(self): @@ -333,7 +413,7 @@ def flush(self): sync : Identical function """ - if hasattr(self, 'mode') and self.mode is 'w': + if hasattr(self, 'mode') and self.mode in 'wa': self._write() sync = flush @@ -375,7 +455,7 @@ def _write_att_array(self, attributes): self._pack_int(len(attributes)) for name, values in attributes.items(): self._pack_string(name) - self._write_values(values) + self._write_att_values(values) else: self.fp.write(ABSENT) @@ -384,11 +464,13 @@ def _write_var_array(self): self.fp.write(NC_VARIABLE) self._pack_int(len(self.variables)) - # Sort variables non-recs first, then recs. We use a DSU - # since some people use pupynere with Python 2.3.x. - deco = [(v._shape and not v.isrec, k) for (k, v) in self.variables.items()] - deco.sort() - variables = [k for (unused, k) in deco][::-1] + # Sort variable names non-recs first, then recs. + def sortkey(n): + v = self.variables[n] + if v.isrec: + return (-1,) + return v._shape + variables = sorted(self.variables, key=sortkey, reverse=True) # Set the metadata for all variables. 
for name in variables: @@ -426,8 +508,8 @@ def _write_var_metadata(self, name): vsize = var.data[0].size * var.data.itemsize except IndexError: vsize = 0 - rec_vars = len([var for var in self.variables.values() - if var.isrec]) + rec_vars = len([v for v in self.variables.values() + if v.isrec]) if rec_vars > 1: vsize += -vsize % 4 self.variables[name].__dict__['_vsize'] = vsize @@ -450,12 +532,17 @@ def _write_var_data(self, name): if not var.isrec: self.fp.write(var.data.tostring()) count = var.data.size * var.data.itemsize - self.fp.write(b'0' * (var._vsize - count)) + self._write_var_padding(var, var._vsize - count) else: # record variable # Handle rec vars with shape[0] < nrecs. if self._recs > len(var.data): shape = (self._recs,) + var.data.shape[1:] - var.data.resize(shape) + # Resize in-place does not always work since + # the array might not be single-segment + try: + var.data.resize(shape) + except ValueError: + var.__dict__['data'] = np.resize(var.data, shape).astype(var.data.dtype) pos0 = pos = self.fp.tell() for rec in var.data: @@ -468,30 +555,42 @@ def _write_var_data(self, name): self.fp.write(rec.tostring()) # Padding count = rec.size * rec.itemsize - self.fp.write(b'0' * (var._vsize - count)) + self._write_var_padding(var, var._vsize - count) pos += self._recsize self.fp.seek(pos) self.fp.seek(pos0 + var._vsize) - def _write_values(self, values): + def _write_var_padding(self, var, size): + encoded_fill_value = var._get_encoded_fill_value() + num_fills = size // len(encoded_fill_value) + self.fp.write(encoded_fill_value * num_fills) + + def _write_att_values(self, values): if hasattr(values, 'dtype'): nc_type = REVERSE[values.dtype.char, values.dtype.itemsize] else: types = [ (int, NC_INT), (float, NC_FLOAT), - (str, NC_CHAR), + (str, NC_CHAR) ] - try: - sample = values[0] - except TypeError: + # bytes index into scalars in py3k. Check for "string" types + if isinstance(values, (str, bytes)): sample = values + else: + try: + sample = values[0] # subscriptable? + except TypeError: + sample = values # scalar + for class_, nc_type in types: if isinstance(sample, class_): break typecode, size = TYPEMAP[nc_type] dtype_ = '>%s' % typecode + # asarray() dies with bytes and '>c' in py3k. 
Change to 'S' + dtype_ = 'S' if dtype_ == '>c' else dtype_ values = asarray(values, dtype=dtype_) @@ -508,7 +607,7 @@ def _write_values(self, values): values = values.byteswap() self.fp.write(values.tostring()) count = values.size * values.itemsize - self.fp.write(b'0' * (-count % 4)) # pad + self.fp.write(b'\x00' * (-count % 4)) # pad def _read(self): # Check magic bytes and version @@ -529,7 +628,7 @@ def _read_numrecs(self): def _read_dim_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_DIMENSION]: + if header not in [ZERO, NC_DIMENSION]: raise ValueError("Unexpected header.") count = self._unpack_int() @@ -545,19 +644,19 @@ def _read_gatt_array(self): def _read_att_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_ATTRIBUTE]: + if header not in [ZERO, NC_ATTRIBUTE]: raise ValueError("Unexpected header.") count = self._unpack_int() - attributes = {} + attributes = OrderedDict() for attr in range(count): name = asstr(self._unpack_string()) - attributes[name] = self._read_values() + attributes[name] = self._read_att_values() return attributes def _read_var_array(self): header = self.fp.read(4) - if not header in [ZERO, NC_VARIABLE]: + if header not in [ZERO, NC_VARIABLE]: raise ValueError("Unexpected header.") begin = 0 @@ -567,7 +666,7 @@ def _read_var_array(self): for var in range(count): (name, dimensions, shape, attributes, typecode, size, dtype_, begin_, vsize) = self._read_var() - # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html + # https://www.unidata.ucar.edu/software/netcdf/docs/user_guide.html # Note that vsize is the product of the dimension lengths # (omitting the record dimension) and the number of bytes # per value (determined from the type), increased to the @@ -604,28 +703,21 @@ def _read_var_array(self): else: # not a record variable # Calculate size to avoid problems with vsize (above) a_size = reduce(mul, shape, 1) * size - if self.file_bytes >= 0 and begin_ + a_size > self.file_bytes: - data = frombuffer(b'\x00'*a_size, dtype=dtype_) - elif self.use_mmap: - mm = mmap(self.fp.fileno(), begin_+a_size, access=ACCESS_READ) - data = ndarray.__new__(ndarray, shape, dtype=dtype_, - buffer=mm, offset=begin_, order=0) + if self.use_mmap: + data = self._mm_buf[begin_:begin_+a_size].view(dtype=dtype_) + data.shape = shape else: pos = self.fp.tell() self.fp.seek(begin_) - # Try to read file, which may fail because the data is - # at or past the end of file. In that case, we treat - # this data as zeros. - buf = self.fp.read(a_size) - if len(buf) < a_size: - buf = b'\x00'*a_size - data = frombuffer(buf, dtype=dtype_) + data = frombuffer(self.fp.read(a_size), dtype=dtype_ + ).copy() data.shape = shape self.fp.seek(pos) # Add variable. self.variables[name] = netcdf_variable( - data, typecode, size, shape, dimensions, attributes) + data, typecode, size, shape, dimensions, attributes, + maskandscale=self.maskandscale) if rec_vars: # Remove padding when only one record variable. @@ -635,13 +727,13 @@ def _read_var_array(self): # Build rec array. 
if self.use_mmap: - mm = mmap(self.fp.fileno(), begin+self._recs*self._recsize, access=ACCESS_READ) - rec_array = ndarray.__new__(ndarray, (self._recs,), dtype=dtypes, - buffer=mm, offset=begin, order=0) + rec_array = self._mm_buf[begin:begin+self._recs*self._recsize].view(dtype=dtypes) + rec_array.shape = (self._recs,) else: pos = self.fp.tell() self.fp.seek(begin) - rec_array = frombuffer(self.fp.read(self._recs*self._recsize), dtype=dtypes) + rec_array = frombuffer(self.fp.read(self._recs*self._recsize), + dtype=dtypes).copy() rec_array.shape = (self._recs,) self.fp.seek(pos) @@ -673,7 +765,7 @@ def _read_var(self): return name, dimensions, shape, attributes, typecode, size, dtype_, begin, vsize - def _read_values(self): + def _read_att_values(self): nc_type = self.fp.read(4) n = self._unpack_int() @@ -684,7 +776,7 @@ def _read_values(self): self.fp.read(-count % 4) # read padding if typecode is not 'c': - values = frombuffer(values, dtype='>%s' % typecode) + values = frombuffer(values, dtype='>%s' % typecode).copy() if values.shape == (1,): values = values[0] else: @@ -715,7 +807,7 @@ def _pack_string(self, s): count = len(s) self._pack_int(count) self.fp.write(asbytes(s)) - self.fp.write(b'0' * (-count % 4)) # pad + self.fp.write(b'\x00' * (-count % 4)) # pad def _unpack_string(self): count = self._unpack_int() @@ -726,7 +818,7 @@ def _unpack_string(self): class netcdf_variable(object): """ - A data object for the `netcdf` module. + A data object for netcdf files. `netcdf_variable` objects are constructed by calling the method `netcdf_file.createVariable` on the `netcdf_file` object. `netcdf_variable` @@ -760,6 +852,9 @@ class netcdf_variable(object): attributes : dict, optional Attribute values (any type) keyed by string names. These attributes become attributes for the netcdf_variable object. + maskandscale : bool, optional + Whether to automatically scale and/or mask data based on attributes. + Default is False. Attributes @@ -774,14 +869,17 @@ class netcdf_variable(object): isrec, shape """ - def __init__(self, data, typecode, size, shape, dimensions, attributes=None): + def __init__(self, data, typecode, size, shape, dimensions, + attributes=None, + maskandscale=False): self.data = data self._typecode = typecode self._size = size self._shape = shape self.dimensions = dimensions + self.maskandscale = maskandscale - self._attributes = attributes or {} + self._attributes = attributes or OrderedDict() for k, v in self._attributes.items(): self.__dict__[k] = v @@ -803,7 +901,7 @@ def isrec(self): `netcdf_variable`. 
""" - return self.data.shape and not self._shape[0] + return bool(self.data.shape) and not self._shape[0] isrec = property(isrec) def shape(self): @@ -880,9 +978,36 @@ def itemsize(self): return self._size def __getitem__(self, index): - return self.data[index] + if not self.maskandscale: + return self.data[index] + + data = self.data[index].copy() + missing_value = self._get_missing_value() + data = self._apply_missing_value(data, missing_value) + scale_factor = self._attributes.get('scale_factor') + add_offset = self._attributes.get('add_offset') + if add_offset is not None or scale_factor is not None: + data = data.astype(np.float64) + if scale_factor is not None: + data = data * scale_factor + if add_offset is not None: + data += add_offset + + return data def __setitem__(self, index, data): + if self.maskandscale: + missing_value = ( + self._get_missing_value() or + getattr(data, 'fill_value', 999999)) + self._attributes.setdefault('missing_value', missing_value) + self._attributes.setdefault('_FillValue', missing_value) + data = ((data - self._attributes.get('add_offset', 0.0)) / + self._attributes.get('scale_factor', 1.0)) + data = np.ma.asarray(data).filled(missing_value) + if self._typecode not in 'fd' and data.dtype.kind == 'f': + data = np.round(data) + # Expand data for record vars? if self.isrec: if isinstance(index, tuple): @@ -895,9 +1020,86 @@ def __setitem__(self, index, data): recs = rec_index + 1 if recs > len(self.data): shape = (recs,) + self._shape[1:] - self.data.resize(shape) + # Resize in-place does not always work since + # the array might not be single-segment + try: + self.data.resize(shape) + except ValueError: + self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) self.data[index] = data + def _default_encoded_fill_value(self): + """ + The default encoded fill-value for this Variable's data type. + """ + nc_type = REVERSE[self.typecode(), self.itemsize()] + return FILLMAP[nc_type] + + def _get_encoded_fill_value(self): + """ + Returns the encoded fill value for this variable as bytes. + + This is taken from either the _FillValue attribute, or the default fill + value for this variable's data type. + """ + if '_FillValue' in self._attributes: + fill_value = np.array(self._attributes['_FillValue'], + dtype=self.data.dtype).tostring() + if len(fill_value) == self.itemsize(): + return fill_value + else: + return self._default_encoded_fill_value() + else: + return self._default_encoded_fill_value() + + def _get_missing_value(self): + """ + Returns the value denoting "no data" for this variable. + + If this variable does not have a missing/fill value, returns None. + + If both _FillValue and missing_value are given, give precedence to + _FillValue. The netCDF standard gives special meaning to _FillValue; + missing_value is just used for compatibility with old datasets. + """ + + if '_FillValue' in self._attributes: + missing_value = self._attributes['_FillValue'] + elif 'missing_value' in self._attributes: + missing_value = self._attributes['missing_value'] + else: + missing_value = None + + return missing_value + + @staticmethod + def _apply_missing_value(data, missing_value): + """ + Applies the given missing value to the data array. + + Returns a numpy.ma array, with any value equal to missing_value masked + out (unless missing_value is None, in which case the original array is + returned). 
+ """ + + if missing_value is None: + newdata = data + else: + try: + missing_value_isnan = np.isnan(missing_value) + except (TypeError, NotImplementedError): + # some data types (e.g., characters) cannot be tested for NaN + missing_value_isnan = False + + if missing_value_isnan: + mymask = np.isnan(data) + else: + mymask = (data == missing_value) + + newdata = np.ma.masked_where(mymask, data) + + return newdata + NetCDFFile = netcdf_file NetCDFVariable = netcdf_variable From 5e7795bdfd19f37354e0afbd21c544a0ba235fa3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 09:55:58 -0400 Subject: [PATCH 294/689] DOCTEST: Avoid fixed ID in doctest --- nibabel/externals/netcdf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index 6ee202ae78..21955d5880 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -218,8 +218,8 @@ class netcdf_file(object): directly to memory-mapped data on disk: >>> data = time[:] - >>> data.base.base - + >>> data.base.base # doctest: +ELLIPSIS + If the data is to be processed after the file is closed, it needs to be copied to main memory: From b6a2e683a587b7a3e40355573538b43f36c281c3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 6 Oct 2019 11:19:06 -0400 Subject: [PATCH 295/689] DOCTEST: Delete reference to mmap data to avoid warning --- nibabel/externals/netcdf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index 21955d5880..30b30f5a7f 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -225,6 +225,7 @@ class netcdf_file(object): to be copied to main memory: >>> data = time[:].copy() + >>> del time # References to mmap'd objects can delay full closure >>> f.close() >>> data.mean() 4.5 @@ -237,7 +238,7 @@ class netcdf_file(object): Delete our temporary directory and file: - >>> del f, time # needed for windows unlink + >>> del f # needed for windows unlink >>> os.unlink(fname) >>> os.rmdir(tmp_pth) """ From f84b6654dbc8bf9f0b300365930ea2c378503ce4 Mon Sep 17 00:00:00 2001 From: Egor Panfilov Date: Mon, 7 Oct 2019 21:22:38 +0300 Subject: [PATCH 296/689] Fixed A-P to P-A in coord systems docs --- doc/source/coordinate_systems.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/coordinate_systems.rst b/doc/source/coordinate_systems.rst index 9f7aa1d8f4..9541dc6f82 100644 --- a/doc/source/coordinate_systems.rst +++ b/doc/source/coordinate_systems.rst @@ -215,7 +215,7 @@ From scanner to subject If the subject is lying in the usual position for a brain scan, face up and head first in the scanner, then scanner-left/right is also the left-right -axis of the subject's head, scanner-floor/ceiling is the anterior-posterior +axis of the subject's head, scanner-floor/ceiling is the posterior-anterior axis of the head and scanner-bore is the inferior-superior axis of the head. Sometimes the subject is not lying in the standard position. For example, the @@ -231,14 +231,14 @@ position of the subject. The most common subject-centered scanner coordinate system in neuroimaging is called "scanner RAS" (right, anterior, superior). 
Here the scanner axes are reordered and flipped so that the first axis is
the scanner axis that is closest to the left to right axis of the subject, the
-second is the closest scanner axis to the anterior-posterior axis of the
+second is the closest scanner axis to the posterior-anterior axis of the
subject, and the third is the closest scanner axis to the inferior-superior
axis of the subject. For example, if the subject was lying face to the right
in the scanner, then the first (X) axis of the reference system would be
scanner-floor/ceiling, but reversed so that positive values are towards the
floor. This axis goes from left to right in the subject, with positive
values to the right. The second (Y) axis would be scanner-left/right
-(anterior-posterior in the subject), and the Z axis would be scanner-bore
+(posterior-anterior in the subject), and the Z axis would be scanner-bore
(inferior-superior).

 Naming reference spaces

From bd95ce87933f7b91cf9ed473ee480f2a3de7d9ce Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Tue, 8 Oct 2019 10:50:37 +0100
Subject: [PATCH 297/689] add doc and example for surface gii files

---
 nibabel/gifti/gifti.py | 37 ++++++++++++++++++++++++-------------
 1 file changed, 24 insertions(+), 13 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index ddf761d4b8..b43f43ec8f 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -700,29 +700,40 @@ def agg_data(self, intent_code=None):
 Examples:
 >>> import nibabel as nib
 >>> from nibabel.testing import test_data
- >>> gii_fname = test_data('gifti', 'ascii.gii')
- >>> gii_img = nib.load(gii_fname)
+ >>> surf_gii_fname = test_data('gifti', 'ascii.gii')
+ >>> surf_gii_img = nib.load(surf_gii_fname)

 When not passing anything to``intent_code``

- >>> gii_img.agg_data()
+ >>> surf_gii_img.agg_data()
+ (array([[-16.07201 , -66.187515, 21.266994],
+ [-16.705893, -66.05434 , 21.232786],
+ [-17.61435 , -65.40164 , 21.071466]], dtype=float32),
+ array([0, 1, 2], dtype=int32))

 When passing matching intent codes ``intent_code``

- >>> gii_img.agg_data('pointset')
+ >>> surf_gii_img.agg_data('pointset')
+ array([[-16.07201 , -66.187515, 21.266994],
+ [-16.705893, -66.05434 , 21.232786],
+ [-17.61435 , -65.40164 , 21.071466]], dtype=float32)

- >>> gii_img.agg_data('triangle')
-
- >>> gii_img.agg_data('time series')
+ >>> surf_gii_img.agg_data('triangle')
+ array([0, 1, 2], dtype=int32)

 When passing mismatching intent codes ``intent_code``

- >>> gii_img.agg_data('time series')
+ >>> surf_gii_img.agg_data('time series')
+ () # returns an empty ``tuple``

 When passing tuple ``intent_code``

- >>> gii_img.agg_data(('pointset', 'triangle'))
-
- >>> gii_img.agg_data(('triangle', 'pointset'))
-
-
+ >>> surf_gii_img.agg_data(('pointset', 'triangle'))
+ (array([[-16.07201 , -66.187515, 21.266994],
+ [-16.705893, -66.05434 , 21.232786],
+ [-17.61435 , -65.40164 , 21.071466]], dtype=float32),
+ array([0, 1, 2], dtype=int32))
+
+ >>> surf_gii_img.agg_data(('triangle', 'pointset'))
+ (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994],
+ [-16.705893, -66.05434 , 21.232786],
+ [-17.61435 , -65.40164 , 21.071466]], dtype=float32))

 Parameters
 ----------

From aef5a4f5d975f3e2866e3cc0ce48a1c2e7adc646 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Fri, 11 Oct 2019 17:41:40 +0100
Subject: [PATCH 298/689] fix formatting; need timeseries example

---
 nibabel/gifti/gifti.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 
bd222ed4d4..b01dc2b6d0 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -696,12 +696,14 @@ def agg_data(self, intent_code=None): This may be useful for ensuring that expected data arrives in a consistent order. Examples: + >>> import nibabel as nib >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) When not passing anything to``intent_code`` + >>> surf_gii_img.agg_data() (array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], @@ -709,6 +711,7 @@ def agg_data(self, intent_code=None): array([0, 1, 2], dtype=int32)) When passig matching intend codes ``intent_code`` + >>> surf_gii_img.agg_data('pointset') array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], @@ -718,10 +721,12 @@ def agg_data(self, intent_code=None): array([0, 1, 2], dtype=int32) When passing mismatching intent codes ``intent_code`` + >>> surf_gii_img.agg_data('time series') () # return a empty ``tuple`` When passing tuple ``intent_code`` + >>> surf_gii_img.agg_data(('pointset', 'triangle')) (array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], From 18501c4ca97758ccd1b7abb2d02274b01411076f Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 11 Oct 2019 18:43:29 +0100 Subject: [PATCH 299/689] add time series example --- nibabel/gifti/gifti.py | 46 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index b01dc2b6d0..4cfaee754b 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -701,32 +701,68 @@ def agg_data(self, intent_code=None): >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) + >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-xformspaceverage3_hemi-L.func.gii') + >>> func_gii_img = nib.load(func_gii_fname) When not passing anything to``intent_code`` - - >>> surf_gii_img.agg_data() + + >>> surf_gii_img.agg_data() # surface file (array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) + >>> func_gii_img.agg_data() # functional file + array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , + 541.3617 ], + [640.0118 , 634.727 , 630.03784 , ..., 635.21936 , 641.19586 , + 638.7647 ], + [612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , 615.8239 , + 613.0585 ], + ..., + [101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , 99.258316, + 99.440796], + [371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , 363.44937 , + 363.10278 ], + [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , + 259.7127 ]], dtype=float32) + + When passig matching intend codes ``intent_code`` - >>> surf_gii_img.agg_data('pointset') + >>> surf_gii_img.agg_data('pointset') # surface file array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32) + >>> func_gii_img.agg_data('time series') # functional file + array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , + 541.3617 ], + [640.0118 , 634.727 , 630.03784 , ..., 635.21936 , 641.19586 , + 638.7647 ], + [612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , 615.8239 , + 613.0585 ], + ..., + [101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , 99.258316, + 99.440796], + [371.81592 , 
367.02896 , 363.90207 , ..., 365.52597 , 363.44937 ,
+ 363.10278 ],
+ [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 ,
+ 259.7127 ]], dtype=float32)
+
 >>> surf_gii_img.agg_data('triangle')
 array([0, 1, 2], dtype=int32)

 When passing mismatching intent codes ``intent_code``

 >>> surf_gii_img.agg_data('time series')
 () # returns an empty ``tuple``

+ >>> func_gii_img.agg_data('triangle')
+ () # returns an empty ``tuple``
+
 When passing tuple ``intent_code``

 >>> surf_gii_img.agg_data(('pointset', 'triangle'))
 (array([[-16.07201 , -66.187515, 21.266994],
 [-16.705893, -66.05434 , 21.232786],
 [-17.61435 , -65.40164 , 21.071466]], dtype=float32),
 array([0, 1, 2], dtype=int32))

 >>> surf_gii_img.agg_data(('triangle', 'pointset'))
 (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994],
 [-16.705893, -66.05434 , 21.232786],
 [-17.61435 , -65.40164 , 21.071466]], dtype=float32))

From 51ff298a90435f52ac3742408ec7f9533de2305f Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Fri, 11 Oct 2019 19:49:19 +0100
Subject: [PATCH 300/689] Correct more formatting issues

---
 nibabel/gifti/gifti.py | 75 ++++++++++++++++--------------------------
 1 file changed, 29 insertions(+), 46 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 4cfaee754b..75a283410b 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -685,7 +685,7 @@ def agg_data(self, intent_code=None):
 In the general case, the numpy data array is extracted from each
 ``GiftiDataArray`` object and returned in a ``tuple``, in the order they
 are found in the GIFTI image.
- If all ``GiftiDataArray``s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``),
+ If all ``GiftiDataArray`` s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``),
 then the data arrays are concatenated as columns, producing a vertex-by-time array.
 If an ``intent_code`` is passed, data arrays are filtered by the selected intents,
 before being aggregated.
@@ -695,7 +695,21 @@ def agg_data(self, intent_code=None):
 ``agg_data`` for each element, in order.
 This may be useful for ensuring that expected data arrives in a consistent order.

- Examples:
+ Parameters
+ ----------
+ intent_code : None, string, integer or tuple of strings or integers, optional
+ code(s) specifying nifti intent
+
+ Returns
+ -------
+ tuple of ndarrays or ndarray
+ If the input is a tuple, the returned tuple will match the order.
+
+ Examples
+ --------
+
+ Load two kinds of Gifti files: a surface file containing two types of intent code;
+ a functional file storing time series data only. 
>>> import nibabel as nib >>> from nibabel.testing import test_data @@ -704,85 +718,54 @@ def agg_data(self, intent_code=None): >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-xformspaceverage3_hemi-L.func.gii') >>> func_gii_img = nib.load(func_gii_fname) - When not passing anything to``intent_code`` + When not passing anything to ``intent_code`` >>> surf_gii_img.agg_data() # surface file (array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) - >>> func_gii_img.agg_data() # functional file array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , - 541.3617 ], - [640.0118 , 634.727 , 630.03784 , ..., 635.21936 , 641.19586 , - 638.7647 ], - [612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , 615.8239 , - 613.0585 ], - ..., - [101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , 99.258316, - 99.440796], - [371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , 363.44937 , - 363.10278 ], - [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , + ..., + [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , 259.7127 ]], dtype=float32) - When passig matching intend codes ``intent_code`` - >>> surf_gii_img.agg_data('pointset') # surface file + >>> surf_gii_img.agg_data('pointset') # surface pointset array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32) - + >>> surf_gii_img.agg_data('triangle') # surface triangle + array([0, 1, 2], dtype=int32) >>> func_gii_img.agg_data('time series') # functional file array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , 541.3617 ], - [640.0118 , 634.727 , 630.03784 , ..., 635.21936 , 641.19586 , - 638.7647 ], - [612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , 615.8239 , - 613.0585 ], - ..., - [101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , 99.258316, - 99.440796], - [371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , 363.44937 , - 363.10278 ], - [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , + ..., + [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , 259.7127 ]], dtype=float32) - >>> surf_gii_img.agg_data('triangle') - array([0, 1, 2], dtype=int32) - When passing mismatching intent codes ``intent_code`` + When passing mismatching ``intent_code``, the function return a empty ``tuple`` >>> surf_gii_img.agg_data('time series') - () # return a empty ``tuple`` - + () >>> func_gii_img.agg_data('triangle') - () # return a empty ``tuple`` + () - When passing tuple ``intent_code`` + When passing tuple ``intent_code``, the output will follow + the order of ``intent_code`` in the tuple >>> surf_gii_img.agg_data(('pointset', 'triangle')) (array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) - >>> surf_gii_img.agg_data(('triangle', 'pointset')) (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994], [-16.705893, -66.05434 , 21.232786], [-17.61435 , -65.40164 , 21.071466]], dtype=float32)) - - Parameters - ---------- - intent_code : None, string, integer or tuple of strings or integers, optional - code(s) specifying nifti intent - - Returns - ------- - tuple of ndarrays or ndarray - If the input is a tuple, the returned tuple will match the order. 
""" # Allow multiple intents to specify the order From d9ce63e0cc51dbd87d333a7cfe52c700ccdfbfa2 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Mon, 14 Oct 2019 16:31:46 -0700 Subject: [PATCH 301/689] testing for pathlib --- nibabel/tests/test_image_api.py | 25 ++++++++++++++----------- nibabel/tests/test_image_load_save.py | 16 +++++++++------- nibabel/tests/test_loadsave.py | 18 +++++++++++------- 3 files changed, 34 insertions(+), 25 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 979b8777f9..0650feda84 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -27,6 +27,7 @@ import warnings from functools import partial from itertools import product +import pathlib import numpy as np @@ -144,19 +145,21 @@ def validate_filenames(self, imaker, params): assert_almost_equal(img.get_data(), rt_rt_img.get_data()) # get_ / set_ filename fname = 'an_image' + self.standard_extension - img.set_filename(fname) - assert_equal(img.get_filename(), fname) - assert_equal(img.file_map['image'].filename, fname) + for path in (fname, pathlib.Path(fname)): + img.set_filename(path) + assert_equal(img.get_filename(), path) + assert_equal(img.file_map['image'].filename, path) # to_ / from_ filename fname = 'another_image' + self.standard_extension - with InTemporaryDirectory(): - img.to_filename(fname) - rt_img = img.__class__.from_filename(fname) - assert_array_equal(img.shape, rt_img.shape) - assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_img.get_data()) - del rt_img # to allow windows to delete the directory + for path in (fname, pathlib.Path(fname)): + with InTemporaryDirectory(): + img.to_filename(path) + rt_img = img.__class__.from_filename(path) + assert_array_equal(img.shape, rt_img.shape) + assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) + # get_data will be deprecated + assert_almost_equal(img.get_data(), rt_img.get_data()) + del rt_img # to allow windows to delete the directory def validate_no_slicing(self, imaker, params): img = imaker() diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 7101b6a31b..ce3db18113 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -12,6 +12,7 @@ import shutil from os.path import dirname, join as pjoin from tempfile import mkdtemp +import pathlib import numpy as np @@ -253,13 +254,14 @@ def test_filename_save(): try: pth = mkdtemp() fname = pjoin(pth, 'image' + out_ext) - nils.save(img, fname) - rt_img = nils.load(fname) - assert_array_almost_equal(rt_img.get_data(), data) - assert_true(type(rt_img) is loadklass) - # delete image to allow file close. Otherwise windows - # raises an error when trying to delete the directory - del rt_img + for path in (fname, pathlib.Path(fname)): + nils.save(img, path) + rt_img = nils.load(path) + assert_array_almost_equal(rt_img.get_data(), data) + assert_true(type(rt_img) is loadklass) + # delete image to allow file close. 
Otherwise windows + # raises an error when trying to delete the directory + del rt_img finally: shutil.rmtree(pth) diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 4c1c703389..fceee6813e 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -3,6 +3,7 @@ from os.path import dirname, join as pjoin import shutil +import pathlib import numpy as np @@ -26,13 +27,16 @@ def test_read_img_data(): - for fname in ('example4d.nii.gz', - 'example_nifti2.nii.gz', - 'minc1_1_scale.mnc', - 'minc1_4d.mnc', - 'test.mgz', - 'tiny.mnc' - ): + fnames_test = [ + 'example4d.nii.gz', + 'example_nifti2.nii.gz', + 'minc1_1_scale.mnc', + 'minc1_4d.mnc', + 'test.mgz', + 'tiny.mnc' + ] + fnames_test += [pathlib.Path(p) for p in fnames_test] + for fname in fnames_test: fpath = pjoin(data_path, fname) img = load(fpath) data = img.get_data() From 7887232c521f5cc77f09399db7eb5ea06c2b5b7e Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Mon, 14 Oct 2019 16:37:35 -0700 Subject: [PATCH 302/689] purge basestring --- nibabel/filename_parser.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index f6e23e2dce..553abd8fa7 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -9,11 +9,6 @@ ''' Create filename pairs, triplets etc, with expected extensions ''' import os -try: - basestring -except NameError: - basestring = str - import pathlib @@ -112,7 +107,7 @@ def types_filenames(template_fname, types_exts, >>> tfns == {'t1': '/path/test.funny', 't2': '/path/test.ext2'} True ''' - if not isinstance(template_fname, basestring): + if not isinstance(template_fname, str): raise TypesFilenamesError('Need file name as input ' 'to set_filenames') if template_fname.endswith('.'): From 7eaa3ae3df02f0c33a5bc78f75757cc03e7c247a Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Tue, 15 Oct 2019 12:11:57 -0400 Subject: [PATCH 303/689] BUG: Fix escape --- nibabel/openers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/openers.py b/nibabel/openers.py index e0706c4998..e37216e5f5 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -54,7 +54,7 @@ def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): class Opener(object): - """ Class to accept, maybe open, and context-manage file-likes / filenames + r"""Class to accept, maybe open, and context-manage file-likes / filenames Provides context manager to close files that the constructor opened for you. 
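
The raw-string fix above and the "More escapes" patch that follows respond to the same CPython behaviour: an unrecognized backslash escape such as ``\s`` inside an ordinary (non-raw) string or docstring literal is deprecated and slated to become an error. A minimal sketch of the failure mode these commits guard against; the compiled one-liner is illustrative, and only the ``TYPE_RE`` pattern is taken from the patches themselves:

    import re
    import warnings

    # Compiling source that contains an unrecognized escape such as '\s'
    # emits a warning: DeprecationWarning on Python 3.6-3.11, and a
    # SyntaxWarning (eventually a hard error) on later versions.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        compile(r"pattern = '\s*'", '<demo>', 'exec')
    print(caught[0].category.__name__)

    # The fix applied throughout these patches: make the literal raw, so
    # the backslash reaches the regex engine (or the docstring) untouched.
    TYPE_RE = re.compile(r'type\s*=\s*(string|integer|float)-attribute\s*\n')

Raw strings cost nothing for literals without backslashes, which is why the docstring-only hunks in these patches are behaviour-preserving.
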
From dd52697cbf2391608a28b5dafd11a88ecd8a373d Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Tue, 15 Oct 2019 12:18:15 -0400 Subject: [PATCH 304/689] BUG: More escapes --- nibabel/arraywriters.py | 8 ++++---- nibabel/brikhead.py | 4 ++-- nibabel/deprecator.py | 2 +- nibabel/externals/netcdf.py | 2 +- nibabel/fileholders.py | 2 +- nibabel/funcs.py | 3 ++- nibabel/loadsave.py | 2 +- nibabel/openers.py | 2 +- nibabel/streamlines/__init__.py | 2 +- nibabel/streamlines/tractogram.py | 4 ++-- 10 files changed, 16 insertions(+), 15 deletions(-) diff --git a/nibabel/arraywriters.py b/nibabel/arraywriters.py index fa2d1e4e1c..c5c0efb706 100644 --- a/nibabel/arraywriters.py +++ b/nibabel/arraywriters.py @@ -49,7 +49,7 @@ class ScalingError(WriterError): class ArrayWriter(object): def __init__(self, array, out_dtype=None, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -246,7 +246,7 @@ class SlopeArrayWriter(ArrayWriter): def __init__(self, array, out_dtype=None, calc_scale=True, scaler_dtype=np.float32, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -477,7 +477,7 @@ class SlopeInterArrayWriter(SlopeArrayWriter): def __init__(self, array, out_dtype=None, calc_scale=True, scaler_dtype=np.float32, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -750,7 +750,7 @@ def get_slope_inter(writer): def make_array_writer(data, out_type, has_slope=True, has_intercept=True, **kwargs): - """ Make array writer instance for array `data` and output type `out_type` + r""" Make array writer instance for array `data` and output type `out_type` Parameters ---------- diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index c5847a87a8..3ddfd2e8ad 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -83,8 +83,8 @@ class AFNIHeaderError(HeaderDataError): DATA_OFFSET = 0 -TYPE_RE = re.compile('type\s*=\s*(string|integer|float)-attribute\s*\n') -NAME_RE = re.compile('name\s*=\s*(\w+)\s*\n') +TYPE_RE = re.compile(r'type\s*=\s*(string|integer|float)-attribute\s*\n') +NAME_RE = re.compile(r'name\s*=\s*(\w+)\s*\n') def _unpack_var(var): diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 32a7c6835c..a0b7b8535a 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -5,7 +5,7 @@ import warnings import re -_LEADING_WHITE = re.compile('^(\s*)') +_LEADING_WHITE = re.compile(r'^(\s*)') class ExpiredDeprecationError(RuntimeError): diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index 30b30f5a7f..7adaf32dc1 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -776,7 +776,7 @@ def _read_att_values(self): values = self.fp.read(int(count)) self.fp.read(-count % 4) # read padding - if typecode is not 'c': + if typecode != 'c': values = frombuffer(values, dtype='>%s' % typecode).copy() if values.shape == (1,): values = values[0] diff --git a/nibabel/fileholders.py b/nibabel/fileholders.py index 5a858f1dbf..35cfd3c348 100644 --- a/nibabel/fileholders.py +++ b/nibabel/fileholders.py @@ -99,7 +99,7 @@ def file_like(self): def copy_file_map(file_map): - ''' Copy mapping of fileholders given by `file_map` + r''' Copy mapping of fileholders given by `file_map` Parameters ---------- diff --git a/nibabel/funcs.py b/nibabel/funcs.py index 178ac8191c..b5fa5d0b4b 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -87,7 +87,7 @@ def squeeze_image(img): def concat_images(images, check_affines=True, axis=None): - ''' 
Concatenate images in list to single image, along specified dimension + r''' Concatenate images in list to single image, along specified dimension Parameters ---------- @@ -101,6 +101,7 @@ def concat_images(images, check_affines=True, axis=None): be the same shape. If not None, concatenates on the specified dimension. This requires all images to be the same shape, except on the specified dimension. + Returns ------- concat_img : ``SpatialImage`` diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 421b95ba2f..d603dd619c 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -21,7 +21,7 @@ def load(filename, **kwargs): - ''' Load file given filename, guessing at file type + r''' Load file given filename, guessing at file type Parameters ---------- diff --git a/nibabel/openers.py b/nibabel/openers.py index e37216e5f5..a658c65c0a 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -54,7 +54,7 @@ def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): class Opener(object): - r"""Class to accept, maybe open, and context-manage file-likes / filenames + r""" Class to accept, maybe open, and context-manage file-likes / filenames Provides context manager to close files that the constructor opened for you. diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index 7f999ca19b..102ad8fd01 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -96,7 +96,7 @@ def load(fileobj, lazy_load=False): def save(tractogram, filename, **kwargs): - """ Saves a tractogram to a file. + r""" Saves a tractogram to a file. Parameters ---------- diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 11d72ac78a..c6687b82aa 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -20,7 +20,7 @@ def is_lazy_dict(obj): class SliceableDataDict(MutableMapping): - """ Dictionary for which key access can do slicing on the values. + r""" Dictionary for which key access can do slicing on the values. This container behaves like a standard dictionary but extends key access to allow keys for key access to be indices slicing into the contained ndarray @@ -73,7 +73,7 @@ def __len__(self): class PerArrayDict(SliceableDataDict): - """ Dictionary for which key access can do slicing on the values. + r""" Dictionary for which key access can do slicing on the values. This container behaves like a standard dictionary but extends key access to allow keys for key access to be indices slicing into the contained ndarray From 30844075209e488681db4ee0152a2322efd8b42f Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Tue, 15 Oct 2019 12:11:57 -0400 Subject: [PATCH 305/689] BUG: Fix escape --- nibabel/openers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/openers.py b/nibabel/openers.py index e551404561..84b348a4e5 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -111,7 +111,7 @@ def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): class Opener(object): - """ Class to accept, maybe open, and context-manage file-likes / filenames + r"""Class to accept, maybe open, and context-manage file-likes / filenames Provides context manager to close files that the constructor opened for you. 
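
The "More escapes" patch below repeats the raw-string fixes on a second branch, and it carries one behavioural hunk along with them: in ``nibabel/externals/netcdf.py`` the identity test ``typecode is not 'c'`` becomes the equality test ``typecode != 'c'``. A short sketch of why identity comparison against a string literal is unreliable; the strings here are made up for illustration:

    # Two equal strings need not be the same object: identity against a
    # literal relies on CPython string interning, an implementation
    # detail, and `x is 'literal'` itself is a SyntaxWarning from
    # Python 3.8 onward.
    a = 'left-right'
    b = '-'.join(['left', 'right'])   # equal value, freshly built object
    print(a == b)   # True: equality compares values
    print(a is b)   # False in CPython: two distinct objects

The old test only appeared to work because CPython happens to cache single-character strings such as ``'c'``; the language makes no such guarantee, so ``!=`` states the actual intent.
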
From ddab5468b12535b044b11837ee2776bb989bc341 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Tue, 15 Oct 2019 12:18:15 -0400 Subject: [PATCH 306/689] BUG: More escapes --- nibabel/arraywriters.py | 8 ++++---- nibabel/brikhead.py | 4 ++-- nibabel/deprecator.py | 2 +- nibabel/externals/netcdf.py | 2 +- nibabel/fileholders.py | 2 +- nibabel/funcs.py | 3 ++- nibabel/loadsave.py | 2 +- nibabel/openers.py | 2 +- nibabel/streamlines/__init__.py | 2 +- nibabel/streamlines/tractogram.py | 4 ++-- 10 files changed, 16 insertions(+), 15 deletions(-) diff --git a/nibabel/arraywriters.py b/nibabel/arraywriters.py index 2bd29e4ca4..485d447204 100644 --- a/nibabel/arraywriters.py +++ b/nibabel/arraywriters.py @@ -50,7 +50,7 @@ class ScalingError(WriterError): class ArrayWriter(object): def __init__(self, array, out_dtype=None, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -247,7 +247,7 @@ class SlopeArrayWriter(ArrayWriter): def __init__(self, array, out_dtype=None, calc_scale=True, scaler_dtype=np.float32, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -478,7 +478,7 @@ class SlopeInterArrayWriter(SlopeArrayWriter): def __init__(self, array, out_dtype=None, calc_scale=True, scaler_dtype=np.float32, **kwargs): - """ Initialize array writer + r""" Initialize array writer Parameters ---------- @@ -751,7 +751,7 @@ def get_slope_inter(writer): def make_array_writer(data, out_type, has_slope=True, has_intercept=True, **kwargs): - """ Make array writer instance for array `data` and output type `out_type` + r""" Make array writer instance for array `data` and output type `out_type` Parameters ---------- diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 1a12abfbca..a1d338b469 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -85,8 +85,8 @@ class AFNIHeaderError(HeaderDataError): DATA_OFFSET = 0 -TYPE_RE = re.compile('type\s*=\s*(string|integer|float)-attribute\s*\n') -NAME_RE = re.compile('name\s*=\s*(\w+)\s*\n') +TYPE_RE = re.compile(r'type\s*=\s*(string|integer|float)-attribute\s*\n') +NAME_RE = re.compile(r'name\s*=\s*(\w+)\s*\n') def _unpack_var(var): diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 32a7c6835c..a0b7b8535a 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -5,7 +5,7 @@ import warnings import re -_LEADING_WHITE = re.compile('^(\s*)') +_LEADING_WHITE = re.compile(r'^(\s*)') class ExpiredDeprecationError(RuntimeError): diff --git a/nibabel/externals/netcdf.py b/nibabel/externals/netcdf.py index 3fcb4cd55a..1674f12b4c 100644 --- a/nibabel/externals/netcdf.py +++ b/nibabel/externals/netcdf.py @@ -779,7 +779,7 @@ def _read_att_values(self): values = self.fp.read(int(count)) self.fp.read(-count % 4) # read padding - if typecode is not 'c': + if typecode != 'c': values = frombuffer(values, dtype='>%s' % typecode).copy() if values.shape == (1,): values = values[0] diff --git a/nibabel/fileholders.py b/nibabel/fileholders.py index 5a858f1dbf..35cfd3c348 100644 --- a/nibabel/fileholders.py +++ b/nibabel/fileholders.py @@ -99,7 +99,7 @@ def file_like(self): def copy_file_map(file_map): - ''' Copy mapping of fileholders given by `file_map` + r''' Copy mapping of fileholders given by `file_map` Parameters ---------- diff --git a/nibabel/funcs.py b/nibabel/funcs.py index 240b20f802..54e69cc3e4 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -88,7 +88,7 @@ def squeeze_image(img): def concat_images(images, check_affines=True, axis=None): - ''' 
Concatenate images in list to single image, along specified dimension + r''' Concatenate images in list to single image, along specified dimension Parameters ---------- @@ -102,6 +102,7 @@ def concat_images(images, check_affines=True, axis=None): be the same shape. If not None, concatenates on the specified dimension. This requires all images to be the same shape, except on the specified dimension. + Returns ------- concat_img : ``SpatialImage`` diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 8c3041e73c..9c11423f72 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -22,7 +22,7 @@ def load(filename, **kwargs): - ''' Load file given filename, guessing at file type + r''' Load file given filename, guessing at file type Parameters ---------- diff --git a/nibabel/openers.py b/nibabel/openers.py index 84b348a4e5..dd4366bdf0 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -111,7 +111,7 @@ def _gzip_open(filename, mode='rb', compresslevel=9, keep_open=False): class Opener(object): - r"""Class to accept, maybe open, and context-manage file-likes / filenames + r""" Class to accept, maybe open, and context-manage file-likes / filenames Provides context manager to close files that the constructor opened for you. diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index 84d810367e..6c73714dcc 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -97,7 +97,7 @@ def load(fileobj, lazy_load=False): def save(tractogram, filename, **kwargs): - """ Saves a tractogram to a file. + r""" Saves a tractogram to a file. Parameters ---------- diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 209ed27c26..5f98a1bcf1 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -25,7 +25,7 @@ def is_lazy_dict(obj): class SliceableDataDict(MutableMapping): - """ Dictionary for which key access can do slicing on the values. + r""" Dictionary for which key access can do slicing on the values. This container behaves like a standard dictionary but extends key access to allow keys for key access to be indices slicing into the contained ndarray @@ -78,7 +78,7 @@ def __len__(self): class PerArrayDict(SliceableDataDict): - """ Dictionary for which key access can do slicing on the values. + r""" Dictionary for which key access can do slicing on the values. This container behaves like a standard dictionary but extends key access to allow keys for key access to be indices slicing into the contained ndarray From 6c0497ee8357fdf9ef078cd0b4c0d5a011e82496 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Tue, 15 Oct 2019 17:51:00 -0700 Subject: [PATCH 307/689] move stringify guard to types_filenames --- nibabel/filebasedimages.py | 3 --- nibabel/filename_parser.py | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 4f84e0c1b4..d8af14a180 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -252,12 +252,10 @@ def set_filename(self, filename): ``.file_map`` attribute. Otherwise, the image instance will try and guess the other filenames from this given filename. 
''' - filename = _stringify_path(filename) self.file_map = self.__class__.filespec_to_file_map(filename) @classmethod def from_filename(klass, filename): - filename = _stringify_path(filename) file_map = klass.filespec_to_file_map(filename) return klass.from_file_map(file_map) @@ -332,7 +330,6 @@ def to_filename(self, filename): ------- None ''' - filename = _stringify_path(filename) self.file_map = self.filespec_to_file_map(filename) self.to_file_map() diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index 553abd8fa7..e4e69cc843 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -107,6 +107,7 @@ def types_filenames(template_fname, types_exts, >>> tfns == {'t1': '/path/test.funny', 't2': '/path/test.ext2'} True ''' + template_fname = _stringify_path(template_fname) if not isinstance(template_fname, str): raise TypesFilenamesError('Need file name as input ' 'to set_filenames') From e0fff5c1edec58b60bb12b539416c26a08d61060 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Tue, 15 Oct 2019 17:56:18 -0700 Subject: [PATCH 308/689] update docstrings to accept str or os.PathLike --- nibabel/filebasedimages.py | 10 +++++----- nibabel/filename_parser.py | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index d8af14a180..3566af3e3d 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -246,7 +246,7 @@ def set_filename(self, filename): Parameters ---------- - filename : str + filename : str or os.PathLike If the image format only has one file associated with it, this will be the only filename set into the image ``.file_map`` attribute. Otherwise, the image instance will @@ -279,7 +279,7 @@ def filespec_to_file_map(klass, filespec): Parameters ---------- - filespec : str + filespec : str or os.PathLike Filename that might be for this image file type. Returns @@ -321,7 +321,7 @@ def to_filename(self, filename): Parameters ---------- - filename : str + filename : str or os.PathLike filename to which to save image. We will parse `filename` with ``filespec_to_file_map`` to work out names for image, header etc. @@ -419,7 +419,7 @@ def _sniff_meta_for(klass, filename, sniff_nbytes, sniff=None): Parameters ---------- - filename : str + filename : str or os.PathLike Filename for an image, or an image header (metadata) file. If `filename` points to an image data file, and the image type has a separate "header" file, we work out the name of the header file, @@ -466,7 +466,7 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): Parameters ---------- - filename : str + filename : str or os.PathLike Filename for an image, or an image header (metadata) file. 
If `filename` points to an image data file, and the image type has a separate "header" file, we work out the name of the header file, diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index e4e69cc843..484dd9f011 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -21,7 +21,7 @@ def _stringify_path(filepath_or_buffer): Parameters ---------- - filepath_or_buffer : object to be converted + filepath_or_buffer : str or os.PathLike Returns ------- str_filepath_or_buffer : maybe a string version of the object @@ -56,7 +56,7 @@ def types_filenames(template_fname, types_exts, Parameters ---------- - template_fname : str + template_fname : str or os.PathLike template filename from which to construct output dict of filenames, with given `types_exts` type to extension mapping. If ``self.enforce_extensions`` is True, then filename must have one @@ -177,7 +177,7 @@ def parse_filename(filename, Parameters ---------- - filename : str + filename : str or os.PathLike filename in which to search for type extensions types_exts : sequence of sequences sequence of (name, extension) str sequences defining type to @@ -260,7 +260,7 @@ def splitext_addext(filename, Parameters ---------- - filename : str + filename : str or os.PathLike filename that may end in any or none of `addexts` match_case : bool, optional If True, match case of `addexts` and `filename`, otherwise do From b3adb15af8888304f8a09240a903321680000d7a Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Wed, 16 Oct 2019 09:05:02 +0100 Subject: [PATCH 309/689] docstring passed nosetests --- nibabel/gifti/gifti.py | 64 +++++++++++++++++++++++++----------------- 1 file changed, 39 insertions(+), 25 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 75a283410b..948e2213c8 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -684,7 +684,7 @@ def agg_data(self, intent_code=None): In the general case, the numpy data array is extracted from each ``GiftiDataArray`` object and returned in a ``tuple``, in the order they are found in the GIFTI image. - + If all ``GiftiDataArray`` s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``), then the data arrays are concatenated as columns, producing a vertex-by-time array. 
If an ``intent_code`` is passed, data arrays are filtered by the selected intents, @@ -715,57 +715,71 @@ def agg_data(self, intent_code=None): >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-xformspaceverage3_hemi-L.func.gii') + >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii') >>> func_gii_img = nib.load(func_gii_fname) When not passing anything to ``intent_code`` >>> surf_gii_img.agg_data() # surface file (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), - array([0, 1, 2], dtype=int32)) + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> func_gii_img.agg_data() # functional file - array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , - ..., - [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , - 259.7127 ]], dtype=float32) + array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , + 539.53827 , 541.3617 ], + [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , + 641.19586 , 638.7647 ], + [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , + 615.8239 , 613.0585 ], + ..., + [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , + 99.258316, 99.440796], + [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , + 363.44937 , 363.10278 ], + [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , + 257.8245 , 259.7127 ]], dtype=float32) When passig matching intend codes ``intent_code`` - >>> surf_gii_img.agg_data('pointset') # surface pointset + >>> surf_gii_img.agg_data('pointset') # surface pointset array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32) + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 21.071466]], dtype=float32) >>> surf_gii_img.agg_data('triangle') # surface triangle array([0, 1, 2], dtype=int32) >>> func_gii_img.agg_data('time series') # functional file - array([[545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , 539.53827 , - 541.3617 ], - ..., - [268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , 257.8245 , - 259.7127 ]], dtype=float32) - + array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , + 539.53827 , 541.3617 ], + [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , + 641.19586 , 638.7647 ], + [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , + 615.8239 , 613.0585 ], + ..., + [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , + 99.258316, 99.440796], + [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , + 363.44937 , 363.10278 ], + [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , + 257.8245 , 259.7127 ]], dtype=float32) When passing mismatching ``intent_code``, the function return a empty ``tuple`` >>> surf_gii_img.agg_data('time series') () - >>> func_gii_img.agg_data('triangle') + >>> func_gii_img.agg_data('triangle') () - When passing tuple ``intent_code``, the output will follow + When passing tuple ``intent_code``, the output will follow the order of ``intent_code`` in the tuple >>> surf_gii_img.agg_data(('pointset', 'triangle')) (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), - array([0, 1, 2], dtype=int32)) + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , 
-65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32))

 >>> surf_gii_img.agg_data(('triangle', 'pointset'))
 (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994],
- [-16.705893, -66.05434 , 21.232786],
- [-17.61435 , -65.40164 , 21.071466]], dtype=float32))
+ [-16.705893, -66.05434 , 21.232786],
+ [-17.61435 , -65.40164 , 21.071466]], dtype=float32))
 """

 # Allow multiple intents to specify the order

From 785a8d3cdb3505cfbbd562dd3855afb23e9f11de Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Wed, 16 Oct 2019 09:30:36 +0100
Subject: [PATCH 310/689] fix: trailing white space and os separator

---
 nibabel/gifti/gifti.py      | 4 ++--
 nibabel/testing/__init__.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 948e2213c8..0c4197e368 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -731,7 +731,7 @@ def agg_data(self, intent_code=None):
 641.19586 , 638.7647 ],
 [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 ,
 615.8239 , 613.0585 ],
- ...,
+ ...,
 [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 ,
 99.258316, 99.440796],
 [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 ,
@@ -754,7 +754,7 @@ def agg_data(self, intent_code=None):
 641.19586 , 638.7647 ],
 [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 ,
 615.8239 , 613.0585 ],
- ...,
+ ...,
 [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 ,
 99.258316, 99.440796],
 [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 ,
diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py
index c39468f2d4..fbd1128589 100644
--- a/nibabel/testing/__init__.py
+++ b/nibabel/testing/__init__.py
@@ -35,9 +35,9 @@ def test_data(subdir=None, fname=None):
     if subdir is None:
-        resource = 'tests/data'
+        resource = os.path.join('tests', 'data')
     elif subdir in ('gifti', 'nicom', 'externals'):
-        resource = '%s/tests/data' % subdir
+        resource = os.path.join(subdir, 'tests', 'data')
     else:
         raise ValueError("Unknown test data directory: %s" % subdir)

From 995d834b1b0da4a030d95e9afd072d896ee5f969 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Wed, 16 Oct 2019 09:36:13 +0100
Subject: [PATCH 311/689] fix docstring output

---
 nibabel/gifti/gifti.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 0c4197e368..948e2213c8 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -731,7 +731,7 @@ def agg_data(self, intent_code=None):
 641.19586 , 638.7647 ],
 [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 ,
 615.8239 , 613.0585 ],
- ...,
+ ...,
 [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 ,
 99.258316, 99.440796],
 [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 ,
@@ -754,7 +754,7 @@ def agg_data(self, intent_code=None):
 641.19586 , 638.7647 ],
 [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 ,
 615.8239 , 613.0585 ],
- ...,
+ ...,
 [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 ,
 99.258316, 99.440796],
 [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 ,

From 7ea7dec251217a3166c5dd28abe5e0d73a783a23 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Wed, 16 Oct 2019 09:59:24 +0100
Subject: [PATCH 312/689] fix os separator in the test

---
 nibabel/tests/test_testing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py
index 18d9e17583..e9f2d079ea 100644
--- a/nibabel/tests/test_testing.py
+++ b/nibabel/tests/test_testing.py
@@ -158,7 +158,7 @@ def 
test_test_data(): os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'tests', 'data'))) for subdir in ('nicom', 'gifti', 'externals'): - assert_equal(test_data(subdir), data_path[:-10] + '%s/tests/data' % subdir) + assert_equal(test_data(subdir), os.path.join(data_path[:-10], subdir, 'tests', 'data')) assert_true(os.path.exists(test_data(subdir))) assert_false(os.path.exists(test_data(subdir, 'doesnotexist'))) From f6336765dc1aa7b1ae2f2bbae5e4a1af12ba1fe2 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Wed, 16 Oct 2019 11:11:43 +0100 Subject: [PATCH 313/689] Rename example file --- nibabel/gifti/gifti.py | 2 +- ...ejudgment_space-fsaverage3_hemi-L.func.gii => task.func.gii} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename nibabel/gifti/tests/data/{sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii => task.func.gii} (100%) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 948e2213c8..8a68dfd4c9 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -715,7 +715,7 @@ def agg_data(self, intent_code=None): >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii') + >>> func_gii_fname = test_data('gifti', 'task.func.gii') >>> func_gii_img = nib.load(func_gii_fname) When not passing anything to ``intent_code`` diff --git a/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii b/nibabel/gifti/tests/data/task.func.gii similarity index 100% rename from nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii rename to nibabel/gifti/tests/data/task.func.gii From 6056014b6558913a9adc93c989f68734edab05a6 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Wed, 16 Oct 2019 13:33:19 -0700 Subject: [PATCH 314/689] update _stringify_path doc --- nibabel/filename_parser.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index 484dd9f011..45de93aef9 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -22,6 +22,7 @@ def _stringify_path(filepath_or_buffer): Parameters ---------- filepath_or_buffer : str or os.PathLike + Returns ------- str_filepath_or_buffer : maybe a string version of the object From bf6eb97fb5adb86375b3ef2c3f4a82d77421f380 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Wed, 16 Oct 2019 22:47:13 +0100 Subject: [PATCH 315/689] Changing numpy float print style to 1.13 --- nibabel/__init__.py | 7 ++-- nibabel/gifti/gifti.py | 73 +++++++++++++++++++++--------------------- 2 files changed, 42 insertions(+), 38 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 3e57643fc1..38d06e88b4 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -41,6 +41,7 @@ # Numpy changed print options in 1.14; we can update docstrings and remove # these when our minimum for building docs exceeds that 'legacy_printopt': None, + 'floatmode': None, } def setup_package(): @@ -50,13 +51,15 @@ def setup_package(): if LooseVersion(np.__version__) >= LooseVersion('1.14'): if _test_states.get('legacy_printopt') is None: _test_states['legacy_printopt'] = np.get_printoptions().get('legacy') - np.set_printoptions(legacy="1.13") + _test_states['floatmode'] = np.get_printoptions().get('floatmode') + np.set_printoptions(legacy="1.13", floatmode='fixed') def teardown_package(): """ Reset print options when tests finish 
""" import numpy as np if _test_states.get('legacy_printopt') is not None: - np.set_printoptions(legacy=_test_states.pop('legacy_printopt')) + np.set_printoptions(legacy=_test_states.pop('legacy_printopt'), + floatmode=_test_states.pop('floatmode')) # module imports diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 8a68dfd4c9..8a732fe710 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -721,46 +721,46 @@ def agg_data(self, intent_code=None): When not passing anything to ``intent_code`` >>> surf_gii_img.agg_data() # surface file - (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) + (array([[-16.07201004, -66.18751526, 21.26699448], + [-16.70589256, -66.05433655, 21.23278618], + [-17.61434937, -65.40164185, 21.07146645]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> func_gii_img.agg_data() # functional file - array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , - 539.53827 , 541.3617 ], - [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , - 641.19586 , 638.7647 ], - [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , - 615.8239 , 613.0585 ], + array([[ 545.43261719, 535.84710693, 537.50140381, ..., 540.27618408, + 539.53826904, 541.36169434], + [ 640.01177979, 634.72698975, 630.03784180, ..., 635.21936035, + 641.19586182, 638.76470947], + [ 612.90557861, 607.32281494, 606.13549805, ..., 608.24407959, + 615.82391357, 613.05847168], ..., - [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , - 99.258316, 99.440796], - [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , - 363.44937 , 363.10278 ], - [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , - 257.8245 , 259.7127 ]], dtype=float32) + [ 101.28482056, 101.41191864, 99.21212769, ..., 100.47232056, + 99.25831604, 99.44079590], + [ 371.81591797, 367.02896118, 363.90206909, ..., 365.52597046, + 363.44937134, 363.10278320], + [ 268.65209961, 262.02120972, 259.06716919, ..., 262.53808594, + 257.82449341, 259.71270752]], dtype=float32) When passig matching intend codes ``intent_code`` >>> surf_gii_img.agg_data('pointset') # surface pointset - array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32) + array([[-16.07201004, -66.18751526, 21.26699448], + [-16.70589256, -66.05433655, 21.23278618], + [-17.61434937, -65.40164185, 21.07146645]], dtype=float32) >>> surf_gii_img.agg_data('triangle') # surface triangle array([0, 1, 2], dtype=int32) >>> func_gii_img.agg_data('time series') # functional file - array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , - 539.53827 , 541.3617 ], - [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , - 641.19586 , 638.7647 ], - [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , - 615.8239 , 613.0585 ], + array([[ 545.43261719, 535.84710693, 537.50140381, ..., 540.27618408, + 539.53826904, 541.36169434], + [ 640.01177979, 634.72698975, 630.03784180, ..., 635.21936035, + 641.19586182, 638.76470947], + [ 612.90557861, 607.32281494, 606.13549805, ..., 608.24407959, + 615.82391357, 613.05847168], ..., - [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , - 99.258316, 99.440796], - [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , - 363.44937 , 363.10278 ], - [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , - 257.8245 , 259.7127 ]], dtype=float32) + [ 101.28482056, 101.41191864, 99.21212769, ..., 100.47232056, + 99.25831604, 99.44079590], + [ 371.81591797, 
367.02896118, 363.90206909, ..., 365.52597046, + 363.44937134, 363.10278320], + [ 268.65209961, 262.02120972, 259.06716919, ..., 262.53808594, + 257.82449341, 259.71270752]], dtype=float32) When passing mismatching ``intent_code``, the function return a empty ``tuple`` @@ -773,13 +773,14 @@ def agg_data(self, intent_code=None): the order of ``intent_code`` in the tuple >>> surf_gii_img.agg_data(('pointset', 'triangle')) - (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) + (array([[-16.07201004, -66.18751526, 21.26699448], + [-16.70589256, -66.05433655, 21.23278618], + [-17.61434937, -65.40164185, 21.07146645]], dtype=float32), array([0, 1, 2], dtype=int32)) + >>> surf_gii_img.agg_data(('triangle', 'pointset')) - (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32)) + (array([0, 1, 2], dtype=int32), array([[-16.07201004, -66.18751526, 21.26699448], + [-16.70589256, -66.05433655, 21.23278618], + [-17.61434937, -65.40164185, 21.07146645]], dtype=float32)) """ # Allow multiple intents to specify the order From 0cb7c39ca1f8ec9b61dbd391eafdeee3cb826420 Mon Sep 17 00:00:00 2001 From: Cameron Riddell Date: Wed, 16 Oct 2019 12:31:54 -0700 Subject: [PATCH 316/689] update docstrings to accept str or os.PathLike --- nibabel/loadsave.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 5d740bd178..f8c3e3be0b 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -25,7 +25,7 @@ def load(filename, **kwargs): Parameters ---------- - filename : string + filename : str or os.PathLike specification of file to load \*\*kwargs : keyword arguments Keyword arguments to format-specific load @@ -89,7 +89,7 @@ def save(img, filename): ---------- img : ``SpatialImage`` image to save - filename : str + filename : str or os.PathLike filename (often implying filenames) to which to save `img`. 
Returns

From ca10c447b07c088ba649fb2250129638796bdb44 Mon Sep 17 00:00:00 2001
From: Cameron Riddell
Date: Wed, 16 Oct 2019 12:35:03 -0700
Subject: [PATCH 317/689] mghformat accept pathlib for filespec_to_file_map

---
 nibabel/freesurfer/mghformat.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py
index ddb30cb796..6eb0f156e9 100644
--- a/nibabel/freesurfer/mghformat.py
+++ b/nibabel/freesurfer/mghformat.py
@@ -17,6 +17,7 @@
 from ..volumeutils import (array_to_file, array_from_file, endian_codes,
                            Recoder)
 from ..filebasedimages import SerializableImage
+from ..filename_parser import _stringify_path
 from ..spatialimages import HeaderDataError, SpatialImage
 from ..fileholders import FileHolder
 from ..arrayproxy import ArrayProxy, reshape_dataobj
@@ -529,6 +530,7 @@ def __init__(self, dataobj, affine, header=None,
 
     @classmethod
     def filespec_to_file_map(klass, filespec):
         """ Check for compressed .mgz format, then .mgh format """
+        filespec = _stringify_path(filespec)
         if splitext(filespec)[1].lower() == '.mgz':
             return dict(image=FileHolder(filename=filespec))

From e98dbfc09149a9a97405112a2101bfc2075a4ff8 Mon Sep 17 00:00:00 2001
From: Cameron Riddell
Date: Wed, 16 Oct 2019 13:06:54 -0700
Subject: [PATCH 318/689] tests pathlib compatible

---
 nibabel/tests/test_image_api.py       |  4 ++--
 nibabel/tests/test_image_load_save.py |  2 +-
 nibabel/tests/test_loadsave.py        | 12 +++++++++---
 3 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py
index 27aa3abc36..3b921b9fb9 100644
--- a/nibabel/tests/test_image_api.py
+++ b/nibabel/tests/test_image_api.py
@@ -144,8 +144,8 @@ def validate_filenames(self, imaker, params):
         fname = 'an_image' + self.standard_extension
         for path in (fname, pathlib.Path(fname)):
             img.set_filename(path)
-            assert_equal(img.get_filename(), path)
-            assert_equal(img.file_map['image'].filename, path)
+            assert_equal(img.get_filename(), str(path))
+            assert_equal(img.file_map['image'].filename, str(path))
         # to_ / from_ filename
         fname = 'another_image' + self.standard_extension
         for path in (fname, pathlib.Path(fname)):
diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py
index f60ea54ea0..9d58a3ed60 100644
--- a/nibabel/tests/test_image_load_save.py
+++ b/nibabel/tests/test_image_load_save.py
@@ -259,7 +259,7 @@ def test_filename_save():
         for path in (fname, pathlib.Path(fname)):
             nils.save(img, path)
             rt_img = nils.load(path)
-            assert_array_almost_equal(rt_img.get_data(), data)
+            assert_array_almost_equal(rt_img.get_fdata(), data)
             assert_true(type(rt_img) is loadklass)
             # delete image to allow file close. 
Otherwise windows
            # raises an error when trying to delete the directory
diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py
index 1925422590..3d7101b6d3 100644
--- a/nibabel/tests/test_loadsave.py
+++ b/nibabel/tests/test_loadsave.py
@@ -37,7 +37,10 @@ def test_read_img_data():
     ]
     fnames_test += [pathlib.Path(p) for p in fnames_test]
     for fname in fnames_test:
-        fpath = pjoin(data_path, fname)
+        # os.path.join doesn't work between str / os.PathLike in py3.5
+        fpath = pjoin(data_path, str(fname))
+        if isinstance(fname, pathlib.Path):
+            fpath = pathlib.Path(fpath)
         img = load(fpath)
         data = img.get_fdata()
         data2 = read_img_data(img)
@@ -49,8 +52,11 @@ def test_read_img_data():
         assert_array_equal(read_img_data(img, prefer='unscaled'), data)
         # Assert all caps filename works as well
         with TemporaryDirectory() as tmpdir:
-            up_fpath = pjoin(tmpdir, fname.upper())
-            shutil.copyfile(fpath, up_fpath)
+            up_fpath = pjoin(tmpdir, str(fname).upper())
+            if isinstance(fname, pathlib.Path):
+                up_fpath = pathlib.Path(up_fpath)
+            # shutil doesn't work with os.PathLike in py3.5
+            shutil.copyfile(str(fpath), str(up_fpath))
             img = load(up_fpath)
             assert_array_equal(img.dataobj, data)
             del img

From 63945366103eebacff77203d02fbfd3643a4645c Mon Sep 17 00:00:00 2001
From: Cameron Riddell
Date: Wed, 16 Oct 2019 15:29:53 -0700
Subject: [PATCH 319/689] update _stringify_path doc

---
 nibabel/filename_parser.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py
index 45de93aef9..b8a03060c0 100644
--- a/nibabel/filename_parser.py
+++ b/nibabel/filename_parser.py
@@ -25,13 +25,14 @@ def _stringify_path(filepath_or_buffer):
 
     Returns
     -------
-    str_filepath_or_buffer : maybe a string version of the object
+    str_filepath_or_buffer : str
+
     Notes
     -----
     Objects supporting the fspath protocol (python 3.6+) are coerced
     according to its __fspath__ method.
-    For backwards compatibility with older pythons, pathlib.Path and
-    py.path objects are specially coerced.
+    For backwards compatibility with older pythons, pathlib.Path objects
+    are specially coerced.
     Any other object is passed through unchanged, which includes bytes,
     strings, buffers, or anything else that's not even path-like.

From a9471a0dbe9fd60fb7eb849d4e2b8f8f47b2cd88 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Mon, 21 Oct 2019 16:13:20 +0100
Subject: [PATCH 320/689] Revert "Rename example file"

This reverts commit f6336765dc1aa7b1ae2f2bbae5e4a1af12ba1fe2.

---
 nibabel/gifti/gifti.py | 2 +-
 ... 
sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename nibabel/gifti/tests/data/{task.func.gii => sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii} (100%) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 8a732fe710..8c3a5baca5 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -715,7 +715,7 @@ def agg_data(self, intent_code=None): >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'task.func.gii') + >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii') >>> func_gii_img = nib.load(func_gii_fname) When not passing anything to ``intent_code`` diff --git a/nibabel/gifti/tests/data/task.func.gii b/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii similarity index 100% rename from nibabel/gifti/tests/data/task.func.gii rename to nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii From 9a52b789c1ff93cae5e8cd239633457cc75eb221 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:16:36 +0100 Subject: [PATCH 321/689] Revert "Changing numpy float print style to 1.13" This reverts commit bf6eb97fb5adb86375b3ef2c3f4a82d77421f380. --- nibabel/__init__.py | 7 ++-- nibabel/gifti/gifti.py | 73 +++++++++++++++++++++--------------------- 2 files changed, 38 insertions(+), 42 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 38d06e88b4..3e57643fc1 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -41,7 +41,6 @@ # Numpy changed print options in 1.14; we can update docstrings and remove # these when our minimum for building docs exceeds that 'legacy_printopt': None, - 'floatmode': None, } def setup_package(): @@ -51,15 +50,13 @@ def setup_package(): if LooseVersion(np.__version__) >= LooseVersion('1.14'): if _test_states.get('legacy_printopt') is None: _test_states['legacy_printopt'] = np.get_printoptions().get('legacy') - _test_states['floatmode'] = np.get_printoptions().get('floatmode') - np.set_printoptions(legacy="1.13", floatmode='fixed') + np.set_printoptions(legacy="1.13") def teardown_package(): """ Reset print options when tests finish """ import numpy as np if _test_states.get('legacy_printopt') is not None: - np.set_printoptions(legacy=_test_states.pop('legacy_printopt'), - floatmode=_test_states.pop('floatmode')) + np.set_printoptions(legacy=_test_states.pop('legacy_printopt')) # module imports diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 8c3a5baca5..948e2213c8 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -721,46 +721,46 @@ def agg_data(self, intent_code=None): When not passing anything to ``intent_code`` >>> surf_gii_img.agg_data() # surface file - (array([[-16.07201004, -66.18751526, 21.26699448], - [-16.70589256, -66.05433655, 21.23278618], - [-17.61434937, -65.40164185, 21.07146645]], dtype=float32), array([0, 1, 2], dtype=int32)) + (array([[-16.07201 , -66.187515, 21.266994], + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> func_gii_img.agg_data() # functional file - array([[ 545.43261719, 535.84710693, 537.50140381, ..., 540.27618408, - 539.53826904, 541.36169434], - [ 640.01177979, 634.72698975, 630.03784180, ..., 635.21936035, - 641.19586182, 638.76470947], - [ 
612.90557861, 607.32281494, 606.13549805, ..., 608.24407959, - 615.82391357, 613.05847168], + array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , + 539.53827 , 541.3617 ], + [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , + 641.19586 , 638.7647 ], + [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , + 615.8239 , 613.0585 ], ..., - [ 101.28482056, 101.41191864, 99.21212769, ..., 100.47232056, - 99.25831604, 99.44079590], - [ 371.81591797, 367.02896118, 363.90206909, ..., 365.52597046, - 363.44937134, 363.10278320], - [ 268.65209961, 262.02120972, 259.06716919, ..., 262.53808594, - 257.82449341, 259.71270752]], dtype=float32) + [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , + 99.258316, 99.440796], + [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , + 363.44937 , 363.10278 ], + [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , + 257.8245 , 259.7127 ]], dtype=float32) When passig matching intend codes ``intent_code`` >>> surf_gii_img.agg_data('pointset') # surface pointset - array([[-16.07201004, -66.18751526, 21.26699448], - [-16.70589256, -66.05433655, 21.23278618], - [-17.61434937, -65.40164185, 21.07146645]], dtype=float32) + array([[-16.07201 , -66.187515, 21.266994], + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 21.071466]], dtype=float32) >>> surf_gii_img.agg_data('triangle') # surface triangle array([0, 1, 2], dtype=int32) >>> func_gii_img.agg_data('time series') # functional file - array([[ 545.43261719, 535.84710693, 537.50140381, ..., 540.27618408, - 539.53826904, 541.36169434], - [ 640.01177979, 634.72698975, 630.03784180, ..., 635.21936035, - 641.19586182, 638.76470947], - [ 612.90557861, 607.32281494, 606.13549805, ..., 608.24407959, - 615.82391357, 613.05847168], + array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , + 539.53827 , 541.3617 ], + [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , + 641.19586 , 638.7647 ], + [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , + 615.8239 , 613.0585 ], ..., - [ 101.28482056, 101.41191864, 99.21212769, ..., 100.47232056, - 99.25831604, 99.44079590], - [ 371.81591797, 367.02896118, 363.90206909, ..., 365.52597046, - 363.44937134, 363.10278320], - [ 268.65209961, 262.02120972, 259.06716919, ..., 262.53808594, - 257.82449341, 259.71270752]], dtype=float32) + [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , + 99.258316, 99.440796], + [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , + 363.44937 , 363.10278 ], + [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , + 257.8245 , 259.7127 ]], dtype=float32) When passing mismatching ``intent_code``, the function return a empty ``tuple`` @@ -773,14 +773,13 @@ def agg_data(self, intent_code=None): the order of ``intent_code`` in the tuple >>> surf_gii_img.agg_data(('pointset', 'triangle')) - (array([[-16.07201004, -66.18751526, 21.26699448], - [-16.70589256, -66.05433655, 21.23278618], - [-17.61434937, -65.40164185, 21.07146645]], dtype=float32), array([0, 1, 2], dtype=int32)) - + (array([[-16.07201 , -66.187515, 21.266994], + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> surf_gii_img.agg_data(('triangle', 'pointset')) - (array([0, 1, 2], dtype=int32), array([[-16.07201004, -66.18751526, 21.26699448], - [-16.70589256, -66.05433655, 21.23278618], - [-17.61434937, -65.40164185, 21.07146645]], dtype=float32)) + (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994], + [-16.705893, -66.05434 , 21.232786], + [-17.61435 , -65.40164 , 
21.071466]], dtype=float32)) """ # Allow multiple intents to specify the order From 0447bbcfbebb3bf445fc7b6215cd3c4a46dbce74 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:17:55 +0100 Subject: [PATCH 322/689] Revert "Revert "Rename example file"" This reverts commit a9471a0dbe9fd60fb7eb849d4e2b8f8f47b2cd88. --- nibabel/gifti/gifti.py | 2 +- ...ejudgment_space-fsaverage3_hemi-L.func.gii => task.func.gii} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename nibabel/gifti/tests/data/{sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii => task.func.gii} (100%) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 948e2213c8..8a68dfd4c9 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -715,7 +715,7 @@ def agg_data(self, intent_code=None): >>> from nibabel.testing import test_data >>> surf_gii_fname = test_data('gifti', 'ascii.gii') >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii') + >>> func_gii_fname = test_data('gifti', 'task.func.gii') >>> func_gii_img = nib.load(func_gii_fname) When not passing anything to ``intent_code`` diff --git a/nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii b/nibabel/gifti/tests/data/task.func.gii similarity index 100% rename from nibabel/gifti/tests/data/sub-01_task-rhymejudgment_space-fsaverage3_hemi-L.func.gii rename to nibabel/gifti/tests/data/task.func.gii From 1ecaa2655ecde11df38dddeb067e9cc2f2843b31 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:37:08 +0100 Subject: [PATCH 323/689] Move the docstring to test --- nibabel/gifti/gifti.py | 41 -------------------- nibabel/gifti/tests/test_gifti.py | 64 ++++++++++++++++++++++++++++++- 2 files changed, 63 insertions(+), 42 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 8a68dfd4c9..f7cecb9cdb 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -721,65 +721,24 @@ def agg_data(self, intent_code=None): When not passing anything to ``intent_code`` >>> surf_gii_img.agg_data() # surface file - (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> func_gii_img.agg_data() # functional file - array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , - 539.53827 , 541.3617 ], - [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , - 641.19586 , 638.7647 ], - [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , - 615.8239 , 613.0585 ], - ..., - [ 101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , - 99.258316, 99.440796], - [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , - 363.44937 , 363.10278 ], - [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , - 257.8245 , 259.7127 ]], dtype=float32) When passig matching intend codes ``intent_code`` >>> surf_gii_img.agg_data('pointset') # surface pointset - array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32) >>> surf_gii_img.agg_data('triangle') # surface triangle - array([0, 1, 2], dtype=int32) >>> func_gii_img.agg_data('time series') # functional file - array([[ 545.4326 , 535.8471 , 537.5014 , ..., 540.2762 , - 539.53827 , 541.3617 ], - [ 640.0118 , 634.727 , 630.03784 , ..., 635.21936 , - 641.19586 , 638.7647 ], - [ 612.9056 , 607.3228 , 606.1355 , ..., 608.2441 , - 615.8239 , 613.0585 ], - ..., - [ 
101.28482 , 101.41192 , 99.21213 , ..., 100.47232 , - 99.258316, 99.440796], - [ 371.81592 , 367.02896 , 363.90207 , ..., 365.52597 , - 363.44937 , 363.10278 ], - [ 268.6521 , 262.0212 , 259.06717 , ..., 262.5381 , - 257.8245 , 259.7127 ]], dtype=float32) When passing mismatching ``intent_code``, the function return a empty ``tuple`` >>> surf_gii_img.agg_data('time series') - () >>> func_gii_img.agg_data('triangle') - () When passing tuple ``intent_code``, the output will follow the order of ``intent_code`` in the tuple >>> surf_gii_img.agg_data(('pointset', 'triangle')) - (array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32), array([0, 1, 2], dtype=int32)) >>> surf_gii_img.agg_data(('triangle', 'pointset')) - (array([0, 1, 2], dtype=int32), array([[-16.07201 , -66.187515, 21.266994], - [-16.705893, -66.05434 , 21.232786], - [-17.61435 , -65.40164 , 21.071466]], dtype=float32)) """ # Allow multiple intents to specify the order diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index e7bc5f0a16..ced6701aa1 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -17,12 +17,74 @@ from numpy.testing import (assert_array_almost_equal, assert_array_equal) from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) -from nibabel.testing import clear_and_catch_warnings +from nibabel.testing import clear_and_catch_warnings, test_data from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6) import itertools +def test_agg_data(): + """ + Aggregate GIFTI data arrays into an ndarray or tuple of ndarray + + Examples + -------- + + Load two kinds of Gifti files: a surface file containing two types of intent code; + a functional file storing time series data only. 
+ + >>> import nibabel as nib + >>> from nibabel.testing import test_data + >>> surf_gii_fname = test_data('gifti', 'ascii.gii') + >>> surf_gii_img = nib.load(surf_gii_fname) + >>> func_gii_fname = test_data('gifti', 'task.func.gii') + >>> func_gii_img = nib.load(func_gii_fname) + + When not passing anything to ``intent_code`` + + >>> surf_gii_img.agg_data() # surface file + >>> func_gii_img.agg_data() # functional file + + When passig matching intend codes ``intent_code`` + + >>> surf_gii_img.agg_data('pointset') # surface pointset + >>> surf_gii_img.agg_data('triangle') # surface triangle + >>> func_gii_img.agg_data('time series') # functional file + + When passing mismatching ``intent_code``, the function return a empty ``tuple`` + + >>> surf_gii_img.agg_data('time series') + >>> func_gii_img.agg_data('triangle') + + When passing tuple ``intent_code``, the output will follow + the order of ``intent_code`` in the tuple + + >>> surf_gii_img.agg_data(('pointset', 'triangle')) + >>> surf_gii_img.agg_data(('triangle', 'pointset')) + """ + + surf_gii_fname = test_data('gifti', 'ascii.gii') + surf_gii_img = nib.load(surf_gii_fname) + func_gii_fname = test_data('gifti', 'task.func.gii') + func_gii_img = nib.load(func_gii_fname) + + point_data = surf_gii_img.get_arrays_from_intent('pointset')[0].data + triangle_data = surf_gii_img.get_arrays_from_intent('triangle')[0].data + func_da = func_gii_img.get_arrays_from_intent('time series') + func_data = np.column_stack(tuple(da.data for da in func_da)) + + assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) # surface file + assert_array_equal(func_gii_img.agg_data(), func_data) # functional + assert_array_equal(surf_gii_img.agg_data('pointset'), point_data) # surface pointset + assert_array_equal(surf_gii_img.agg_data('triangle'), triangle_data) # surface triangle + assert_array_equal(func_gii_img.agg_data('time series'), func_data) # functional + + assert_equal(surf_gii_img.agg_data('time series'), ()) + assert_equal(func_gii_img.agg_data('triangle'), ()) + + assert_equal(surf_gii_img.agg_data(('pointset', 'triangle')), (point_data, triangle_data)) + assert_equal(surf_gii_img.agg_data(('triangle', 'pointset')), (triangle_data, point_data)) + def test_gifti_image(): # Check that we're not modifying the default empty list in the default # arguments. From 384475ba88997390f9bc1704b2e6eb1638279642 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:48:50 +0100 Subject: [PATCH 324/689] Remove the actual docstring to prevent errors --- nibabel/gifti/tests/test_gifti.py | 43 +++++-------------------------- 1 file changed, 7 insertions(+), 36 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index ced6701aa1..33356f8679 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -25,42 +25,7 @@ def test_agg_data(): """ - Aggregate GIFTI data arrays into an ndarray or tuple of ndarray - - Examples - -------- - - Load two kinds of Gifti files: a surface file containing two types of intent code; - a functional file storing time series data only. 
- - >>> import nibabel as nib - >>> from nibabel.testing import test_data - >>> surf_gii_fname = test_data('gifti', 'ascii.gii') - >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'task.func.gii') - >>> func_gii_img = nib.load(func_gii_fname) - - When not passing anything to ``intent_code`` - - >>> surf_gii_img.agg_data() # surface file - >>> func_gii_img.agg_data() # functional file - - When passig matching intend codes ``intent_code`` - - >>> surf_gii_img.agg_data('pointset') # surface pointset - >>> surf_gii_img.agg_data('triangle') # surface triangle - >>> func_gii_img.agg_data('time series') # functional file - - When passing mismatching ``intent_code``, the function return a empty ``tuple`` - - >>> surf_gii_img.agg_data('time series') - >>> func_gii_img.agg_data('triangle') - - When passing tuple ``intent_code``, the output will follow - the order of ``intent_code`` in the tuple - - >>> surf_gii_img.agg_data(('pointset', 'triangle')) - >>> surf_gii_img.agg_data(('triangle', 'pointset')) + Move examples of aggregate GIFTI data arrays into an ndarray or tuple of ndarray """ surf_gii_fname = test_data('gifti', 'ascii.gii') @@ -73,15 +38,21 @@ def test_agg_data(): func_da = func_gii_img.get_arrays_from_intent('time series') func_data = np.column_stack(tuple(da.data for da in func_da)) + # When not passing anything to ``intent_code`` assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) # surface file assert_array_equal(func_gii_img.agg_data(), func_data) # functional + + # When passig matching intend codes ``intent_code`` assert_array_equal(surf_gii_img.agg_data('pointset'), point_data) # surface pointset assert_array_equal(surf_gii_img.agg_data('triangle'), triangle_data) # surface triangle assert_array_equal(func_gii_img.agg_data('time series'), func_data) # functional + # When passing mismatching ``intent_code``, the function return a empty ``tuple`` assert_equal(surf_gii_img.agg_data('time series'), ()) assert_equal(func_gii_img.agg_data('triangle'), ()) + # When passing tuple ``intent_code``, the output will follow + # the order of ``intent_code`` in the tuple assert_equal(surf_gii_img.agg_data(('pointset', 'triangle')), (point_data, triangle_data)) assert_equal(surf_gii_img.agg_data(('triangle', 'pointset')), (triangle_data, point_data)) From 3fb70031f11339a73dd0f0913928dff3d1791aaa Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:56:44 +0100 Subject: [PATCH 325/689] Revert "Remove the actual docstring to prevent errors" This reverts commit 384475ba88997390f9bc1704b2e6eb1638279642. --- nibabel/gifti/tests/test_gifti.py | 43 ++++++++++++++++++++++++++----- 1 file changed, 36 insertions(+), 7 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 33356f8679..ced6701aa1 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -25,7 +25,42 @@ def test_agg_data(): """ - Move examples of aggregate GIFTI data arrays into an ndarray or tuple of ndarray + Aggregate GIFTI data arrays into an ndarray or tuple of ndarray + + Examples + -------- + + Load two kinds of Gifti files: a surface file containing two types of intent code; + a functional file storing time series data only. 
+ + >>> import nibabel as nib + >>> from nibabel.testing import test_data + >>> surf_gii_fname = test_data('gifti', 'ascii.gii') + >>> surf_gii_img = nib.load(surf_gii_fname) + >>> func_gii_fname = test_data('gifti', 'task.func.gii') + >>> func_gii_img = nib.load(func_gii_fname) + + When not passing anything to ``intent_code`` + + >>> surf_gii_img.agg_data() # surface file + >>> func_gii_img.agg_data() # functional file + + When passig matching intend codes ``intent_code`` + + >>> surf_gii_img.agg_data('pointset') # surface pointset + >>> surf_gii_img.agg_data('triangle') # surface triangle + >>> func_gii_img.agg_data('time series') # functional file + + When passing mismatching ``intent_code``, the function return a empty ``tuple`` + + >>> surf_gii_img.agg_data('time series') + >>> func_gii_img.agg_data('triangle') + + When passing tuple ``intent_code``, the output will follow + the order of ``intent_code`` in the tuple + + >>> surf_gii_img.agg_data(('pointset', 'triangle')) + >>> surf_gii_img.agg_data(('triangle', 'pointset')) """ surf_gii_fname = test_data('gifti', 'ascii.gii') @@ -38,21 +73,15 @@ def test_agg_data(): func_da = func_gii_img.get_arrays_from_intent('time series') func_data = np.column_stack(tuple(da.data for da in func_da)) - # When not passing anything to ``intent_code`` assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) # surface file assert_array_equal(func_gii_img.agg_data(), func_data) # functional - - # When passig matching intend codes ``intent_code`` assert_array_equal(surf_gii_img.agg_data('pointset'), point_data) # surface pointset assert_array_equal(surf_gii_img.agg_data('triangle'), triangle_data) # surface triangle assert_array_equal(func_gii_img.agg_data('time series'), func_data) # functional - # When passing mismatching ``intent_code``, the function return a empty ``tuple`` assert_equal(surf_gii_img.agg_data('time series'), ()) assert_equal(func_gii_img.agg_data('triangle'), ()) - # When passing tuple ``intent_code``, the output will follow - # the order of ``intent_code`` in the tuple assert_equal(surf_gii_img.agg_data(('pointset', 'triangle')), (point_data, triangle_data)) assert_equal(surf_gii_img.agg_data(('triangle', 'pointset')), (triangle_data, point_data)) From 08a875293190d03e0ae4184d65ec638e7eddc793 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 16:57:41 +0100 Subject: [PATCH 326/689] Remove docstring in agg_data --- nibabel/gifti/gifti.py | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index f7cecb9cdb..609807e2f9 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -704,41 +704,6 @@ def agg_data(self, intent_code=None): ------- tuple of ndarrays or ndarray If the input is a tuple, the returned tuple will match the order. - - Examples - -------- - - Load two kinds of Gifti files: a surface file containing two types of intent code; - a functional file storing time series data only. 
- - >>> import nibabel as nib - >>> from nibabel.testing import test_data - >>> surf_gii_fname = test_data('gifti', 'ascii.gii') - >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'task.func.gii') - >>> func_gii_img = nib.load(func_gii_fname) - - When not passing anything to ``intent_code`` - - >>> surf_gii_img.agg_data() # surface file - >>> func_gii_img.agg_data() # functional file - - When passig matching intend codes ``intent_code`` - - >>> surf_gii_img.agg_data('pointset') # surface pointset - >>> surf_gii_img.agg_data('triangle') # surface triangle - >>> func_gii_img.agg_data('time series') # functional file - - When passing mismatching ``intent_code``, the function return a empty ``tuple`` - - >>> surf_gii_img.agg_data('time series') - >>> func_gii_img.agg_data('triangle') - - When passing tuple ``intent_code``, the output will follow - the order of ``intent_code`` in the tuple - - >>> surf_gii_img.agg_data(('pointset', 'triangle')) - >>> surf_gii_img.agg_data(('triangle', 'pointset')) """ # Allow multiple intents to specify the order From bb7517d3166e0af462afa3fd6d924f246312a7d1 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Mon, 21 Oct 2019 17:05:28 +0100 Subject: [PATCH 327/689] Remove docstring in test --- nibabel/gifti/tests/test_gifti.py | 39 ------------------------------- 1 file changed, 39 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index ced6701aa1..0ff7402205 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -24,45 +24,6 @@ def test_agg_data(): - """ - Aggregate GIFTI data arrays into an ndarray or tuple of ndarray - - Examples - -------- - - Load two kinds of Gifti files: a surface file containing two types of intent code; - a functional file storing time series data only. 
- - >>> import nibabel as nib - >>> from nibabel.testing import test_data - >>> surf_gii_fname = test_data('gifti', 'ascii.gii') - >>> surf_gii_img = nib.load(surf_gii_fname) - >>> func_gii_fname = test_data('gifti', 'task.func.gii') - >>> func_gii_img = nib.load(func_gii_fname) - - When not passing anything to ``intent_code`` - - >>> surf_gii_img.agg_data() # surface file - >>> func_gii_img.agg_data() # functional file - - When passig matching intend codes ``intent_code`` - - >>> surf_gii_img.agg_data('pointset') # surface pointset - >>> surf_gii_img.agg_data('triangle') # surface triangle - >>> func_gii_img.agg_data('time series') # functional file - - When passing mismatching ``intent_code``, the function return a empty ``tuple`` - - >>> surf_gii_img.agg_data('time series') - >>> func_gii_img.agg_data('triangle') - - When passing tuple ``intent_code``, the output will follow - the order of ``intent_code`` in the tuple - - >>> surf_gii_img.agg_data(('pointset', 'triangle')) - >>> surf_gii_img.agg_data(('triangle', 'pointset')) - """ - surf_gii_fname = test_data('gifti', 'ascii.gii') surf_gii_img = nib.load(surf_gii_fname) func_gii_fname = test_data('gifti', 'task.func.gii') From 033ca516dfbca9855ee094fae6f4872fac19b9c4 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 22 Oct 2019 16:53:56 +0100 Subject: [PATCH 328/689] add minimum example to docstring --- nibabel/gifti/gifti.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 609807e2f9..0e37173ba2 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -704,6 +704,33 @@ def agg_data(self, intent_code=None): ------- tuple of ndarrays or ndarray If the input is a tuple, the returned tuple will match the order. 
+ + Examples + -------- + >>> import nibabel as nib + >>> from nibabel.testing import test_data + >>> surf_gii_fname = test_data('gifti', 'ascii.gii') + >>> surf_gii_img = nib.load(surf_gii_fname) + >>> func_gii_fname = test_data('gifti', 'task.func.gii') + >>> func_gii_img = nib.load(func_gii_fname) + + Retrieve data without passing ``intent code`` + + >>> surf_data = surf_gii_img.agg_data() + >>> func_data = func_gii_img.agg_data() + + When passig matching intend codes ``intent_code`` + + >>> pointset_data = surf_gii_img.agg_data('pointset') # surface pointset + >>> triangle_data = surf_gii_img.agg_data('triangle') # surface triangle + >>> ts_data = func_gii_img.agg_data('time series') # functional file + + When passing mismatching ``intent_code``, the function return a empty ``tuple`` + + >>> surf_gii_img.agg_data('time series') + () + >>> func_gii_img.agg_data('triangle') + () """ # Allow multiple intents to specify the order From 76efe619d126259b771dd2f15c5cf6a2b3a0a466 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 22 Oct 2019 17:16:11 +0100 Subject: [PATCH 329/689] add shape gifti --- nibabel/gifti/tests/test_gifti.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 0ff7402205..694a7fa19b 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -24,24 +24,29 @@ def test_agg_data(): - surf_gii_fname = test_data('gifti', 'ascii.gii') - surf_gii_img = nib.load(surf_gii_fname) - func_gii_fname = test_data('gifti', 'task.func.gii') - func_gii_img = nib.load(func_gii_fname) + surf_gii_img = nib.load(test_data('gifti', 'ascii.gii')) + func_gii_img = nib.load(test_data('gifti', 'task.func.gii')) + shape_gii_img = nib.load(test_data('gifti', 'rh.shape.curv.gii')) + # add timeseries data with intent code ``none`` point_data = surf_gii_img.get_arrays_from_intent('pointset')[0].data triangle_data = surf_gii_img.get_arrays_from_intent('triangle')[0].data func_da = func_gii_img.get_arrays_from_intent('time series') func_data = np.column_stack(tuple(da.data for da in func_da)) + shape_data = shape_gii_img.get_arrays_from_intent('shape') - assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) # surface file - assert_array_equal(func_gii_img.agg_data(), func_data) # functional - assert_array_equal(surf_gii_img.agg_data('pointset'), point_data) # surface pointset - assert_array_equal(surf_gii_img.agg_data('triangle'), triangle_data) # surface triangle - assert_array_equal(func_gii_img.agg_data('time series'), func_data) # functional + assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) + assert_array_equal(func_gii_img.agg_data(), func_data) + assert_array_equal(shape_gii_img.agg_data(), shape_data) + + assert_array_equal(surf_gii_img.agg_data('pointset'), point_data) + assert_array_equal(surf_gii_img.agg_data('triangle'), triangle_data) + assert_array_equal(func_gii_img.agg_data('time series'), func_data) + assert_array_equal(shape_gii_img.agg_data('shape'), shape_data) assert_equal(surf_gii_img.agg_data('time series'), ()) assert_equal(func_gii_img.agg_data('triangle'), ()) + assert_equal(shape_gii_img.agg_data('pointset'), ()) assert_equal(surf_gii_img.agg_data(('pointset', 'triangle')), (point_data, triangle_data)) assert_equal(surf_gii_img.agg_data(('triangle', 'pointset')), (triangle_data, point_data)) From c8c2c43365cb6d2353f05271ed3dac6b310a10f4 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Wed, 23 Oct 
2019 11:03:24 +0100 Subject: [PATCH 330/689] fix the test with shape gii --- nibabel/gifti/tests/test_gifti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 694a7fa19b..167e56d6cf 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -33,7 +33,7 @@ def test_agg_data(): triangle_data = surf_gii_img.get_arrays_from_intent('triangle')[0].data func_da = func_gii_img.get_arrays_from_intent('time series') func_data = np.column_stack(tuple(da.data for da in func_da)) - shape_data = shape_gii_img.get_arrays_from_intent('shape') + shape_data = shape_gii_img.get_arrays_from_intent('shape')[0].data assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) assert_array_equal(func_gii_img.agg_data(), func_data) From 654ee5b47c8321c6e8ff498747420477a1a6fa88 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Wed, 23 Oct 2019 12:53:51 +0100 Subject: [PATCH 331/689] delete trailing whitespace --- nibabel/gifti/gifti.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index 0e37173ba2..55648786f6 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -704,7 +704,7 @@ def agg_data(self, intent_code=None): ------- tuple of ndarrays or ndarray If the input is a tuple, the returned tuple will match the order. - + Examples -------- >>> import nibabel as nib @@ -716,17 +716,17 @@ def agg_data(self, intent_code=None): Retrieve data without passing ``intent code`` - >>> surf_data = surf_gii_img.agg_data() + >>> surf_data = surf_gii_img.agg_data() >>> func_data = func_gii_img.agg_data() When passig matching intend codes ``intent_code`` - + >>> pointset_data = surf_gii_img.agg_data('pointset') # surface pointset >>> triangle_data = surf_gii_img.agg_data('triangle') # surface triangle >>> ts_data = func_gii_img.agg_data('time series') # functional file - + When passing mismatching ``intent_code``, the function return a empty ``tuple`` - + >>> surf_gii_img.agg_data('time series') () >>> func_gii_img.agg_data('triangle') From 4dca09b701d3d88d0f8c0660e2ec8fd46100f602 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 23 Oct 2019 08:38:40 -0400 Subject: [PATCH 332/689] CI: Test builds on Python 3.8 --- .travis.yml | 1 + azure-pipelines.yml | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/.travis.yml b/.travis.yml index af42de237a..d9069939e6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,6 +23,7 @@ env: python: - 3.6 - 3.7 + - 3.8 matrix: include: diff --git a/azure-pipelines.yml b/azure-pipelines.yml index f595ec35b7..39ff46a5fe 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -28,3 +28,9 @@ jobs: py37-x64: PYTHON_VERSION: '3.7' PYTHON_ARCH: 'x64' + py38-x86: + PYTHON_VERSION: '3.8' + PYTHON_ARCH: 'x86' + py38-x64: + PYTHON_VERSION: '3.8' + PYTHON_ARCH: 'x64' From e65425dcc87ff421a7d3358d0340c4e751cebfb9 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz"
Date: Wed, 23 Oct 2019 08:49:48 -0400
Subject: [PATCH 333/689] CI: Python 3.8 not yet available on Azure

---
 azure-pipelines.yml | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 39ff46a5fe..f6085d9cbe 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -28,9 +28,10 @@ jobs:
       py37-x64:
         PYTHON_VERSION: '3.7'
         PYTHON_ARCH: 'x64'
-      py38-x86:
-        PYTHON_VERSION: '3.8'
-        PYTHON_ARCH: 'x86'
-      py38-x64:
-        PYTHON_VERSION: '3.8'
-        PYTHON_ARCH: 'x64'
+      # Pending https://github.com/microsoft/azure-pipelines-image-generation/issues/1317
+      # py38-x86:
+      #   PYTHON_VERSION: '3.8'
+      #   PYTHON_ARCH: 'x86'
+      # py38-x64:
+      #   PYTHON_VERSION: '3.8'
+      #   PYTHON_ARCH: 'x64'

From 8195aa5d6477a4ddb4a5fe89b2662c33c24270ba Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Wed, 23 Oct 2019 09:01:45 -0400
Subject: [PATCH 334/689] CI: Wait on upstream releases for full 3.8 tests

---
 .travis.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index d9069939e6..2166ae05e9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -23,11 +23,14 @@ env:
 python:
     - 3.6
     - 3.7
-    - 3.8

matrix:
  include:
    # Basic dependencies only
+    - python: 3.8
+      env:
+        - DEPENDS="-r requirements.txt"
+    # Basic dependencies only
    - python: 3.5
      env:
        - DEPENDS="-r requirements.txt"

From f2c7812fafcaebaae9d5d222aabe6cad8e6d6fc7 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Wed, 23 Oct 2019 09:07:53 -0400
Subject: [PATCH 335/689] MNT: Update supported language classifiers

---
 setup.cfg | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.cfg b/setup.cfg
index 874f652234..5630921dd9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -16,6 +16,7 @@ classifiers =
     Programming Language :: Python :: 3.5
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
+    Programming Language :: Python :: 3.8
     Topic :: Scientific/Engineering
 license = MIT License
 description = Access a multitude of neuroimaging data formats

From 60a46247c29c0e82fb38db0fa64381de8784c6dd Mon Sep 17 00:00:00 2001
From: Cameron Riddell
Date: Wed, 23 Oct 2019 08:30:41 -0700
Subject: [PATCH 336/689] Fix flake8 issues

---
 nibabel/filebasedimages.py | 2 +-
 nibabel/filename_parser.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py
index 76e6c8bfa4..90bbd8e652 100644
--- a/nibabel/filebasedimages.py
+++ b/nibabel/filebasedimages.py
@@ -12,7 +12,7 @@
 from copy import deepcopy
 from .fileholders import FileHolder
 from .filename_parser import (types_filenames, TypesFilenamesError,
-                              splitext_addext, _stringify_path)
+                              splitext_addext)
 from .openers import ImageOpener
 from .deprecated import deprecate_with_version

diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py
index b8a03060c0..ed04610fdd 100644
--- a/nibabel/filename_parser.py
+++ b/nibabel/filename_parser.py
@@ -26,12 +26,12 @@ def _stringify_path(filepath_or_buffer):
     Returns
     -------
     str_filepath_or_buffer : str
-    
+
     Notes
     -----
     Objects supporting the fspath protocol (python 3.6+) are coerced
     according to its __fspath__ method.
-    For backwards compatibility with older pythons, pathlib.Path objects 
+    For backwards compatibility with older pythons, pathlib.Path objects
     are specially coerced.
     Any other object is passed through unchanged, which includes bytes,
     strings, buffers, or anything else that's not even path-like. 
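
The pathlib-handling patches above (314, 316-319 and 336) converge on a single
contract: ``load``, ``save`` and ``set_filename`` accept either ``str`` or
``os.PathLike``, coerce the argument through ``_stringify_path``, and store the
filename internally as a plain ``str``. A minimal sketch of that round trip,
assuming only the public ``nibabel.load``/``nibabel.save`` entry points (the
filename below is hypothetical):

    >>> import pathlib
    >>> import numpy as np
    >>> import nibabel as nib
    >>> img = nib.Nifti1Image(np.zeros((2, 2, 2)), np.eye(4))
    >>> nib.save(img, pathlib.Path('example.nii'))  # os.PathLike accepted
    >>> rt = nib.load(pathlib.Path('example.nii'))
    >>> rt.get_filename()                           # coerced to str internally
    'example.nii'
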
From 64b019a163eef652274eb1ae31b519096b271e90 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Sun, 27 Oct 2019 13:33:20 -0400
Subject: [PATCH 337/689] DOC: More comprehensive agg_data examples

---
 nibabel/gifti/gifti.py | 102 +++++++++++++++++++++++++++++++++--------
 1 file changed, 82 insertions(+), 20 deletions(-)

diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py
index 55648786f6..b423ec48dd 100644
--- a/nibabel/gifti/gifti.py
+++ b/nibabel/gifti/gifti.py
@@ -707,29 +707,91 @@ def agg_data(self, intent_code=None):
 
         Examples
         --------
-        >>> import nibabel as nib
-        >>> from nibabel.testing import test_data
-        >>> surf_gii_fname = test_data('gifti', 'ascii.gii')
-        >>> surf_gii_img = nib.load(surf_gii_fname)
-        >>> func_gii_fname = test_data('gifti', 'task.func.gii')
-        >>> func_gii_img = nib.load(func_gii_fname)
-
-        Retrieve data without passing ``intent code``
-
-        >>> surf_data = surf_gii_img.agg_data()
-        >>> func_data = func_gii_img.agg_data()
 
-        When passig matching intend codes ``intent_code``
+        Consider a surface GIFTI file:
 
-        >>> pointset_data = surf_gii_img.agg_data('pointset') # surface pointset
-        >>> triangle_data = surf_gii_img.agg_data('triangle') # surface triangle
-        >>> ts_data = func_gii_img.agg_data('time series') # functional file
-
-        When passing mismatching ``intent_code``, the function return a empty ``tuple``
-
-        >>> surf_gii_img.agg_data('time series')
+        >>> import nibabel as nib
+        >>> from nibabel.testing import test_data
+        >>> surf_img = nib.load(test_data('gifti', 'ascii.gii'))
+
+        The coordinate data, which is indicated by the ``NIFTI_INTENT_POINTSET``
+        intent code, may be retrieved using any of the following equivalent
+        calls:
+
+        >>> coords = surf_img.agg_data('NIFTI_INTENT_POINTSET')
+        >>> coords_2 = surf_img.agg_data('pointset')
+        >>> coords_3 = surf_img.agg_data(1008)  # Numeric code for pointset
+        >>> print(np.array2string(coords, precision=3))
+        [[-16.072 -66.188  21.267]
+         [-16.706 -66.054  21.233]
+         [-17.614 -65.402  21.071]]
+        >>> np.array_equal(coords, coords_2)
+        True
+        >>> np.array_equal(coords, coords_3)
+        True
+
+        Similarly, the triangle mesh can be retrieved using various intent
+        specifiers:
+
+        >>> triangles = surf_img.agg_data('NIFTI_INTENT_TRIANGLE')
+        >>> triangles_2 = surf_img.agg_data('triangle')
+        >>> triangles_3 = surf_img.agg_data(1009)  # Numeric code for triangle
+        >>> print(np.array2string(triangles))
+        [0 1 2]
+        >>> np.array_equal(triangles, triangles_2)
+        True
+        >>> np.array_equal(triangles, triangles_3)
+        True
+
+        All arrays can be retrieved as a ``tuple`` by omitting the intent
+        code:
+
+        >>> coords_4, triangles_4 = surf_img.agg_data()
+        >>> np.array_equal(coords, coords_4)
+        True
+        >>> np.array_equal(triangles, triangles_4)
+        True
+
+        Finally, a tuple of intent codes may be passed in order to select
+        the arrays in a specific order:
+
+        >>> triangles_5, coords_5 = surf_img.agg_data(('triangle', 'pointset'))
+        >>> np.array_equal(triangles, triangles_5)
+        True
+        >>> np.array_equal(coords, coords_5)
+        True
+
+        The following image is a GIFTI file with ten (10) data arrays of the same
+        size, and with intent code 2001 (``NIFTI_INTENT_TIME_SERIES``):
+
+        >>> func_img = nib.load(test_data('gifti', 'task.func.gii'))
+
+        When aggregating time series data, these arrays are concatenated into
+        a single, vertex-by-timestep array:
+
+        >>> series = func_img.agg_data()
+        >>> series.shape
+        (642, 10)
+
+        In the case of a GIFTI file with unknown data arrays, it may be preferable
+        to specify the intent code, so that a time series 
array is always returned:
+
+        >>> series_2 = func_img.agg_data('NIFTI_INTENT_TIME_SERIES')
+        >>> series_3 = func_img.agg_data('time series')
+        >>> series_4 = func_img.agg_data(2001)
+        >>> np.array_equal(series, series_2)
+        True
+        >>> np.array_equal(series, series_3)
+        True
+        >>> np.array_equal(series, series_4)
+        True
+
+        Requesting a data array from a GIFTI file with no matching intent codes
+        will result in an empty tuple:
+
+        >>> surf_img.agg_data('time series')
         ()
-        >>> func_gii_img.agg_data('triangle')
+        >>> func_img.agg_data('triangle')
         ()
         """

From a7acbac4952a593256881d3734b4e2922fc44244 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Sun, 3 Nov 2019 08:13:18 -0500
Subject: [PATCH 338/689] CI: Restore Python 3.8 Azure tests

---
 azure-pipelines.yml | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index f6085d9cbe..d09c5b7740 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -3,7 +3,7 @@ jobs:
   - template: .azure-pipelines/windows.yml
     parameters:
       name: Windows
-      vmImage: vs2017-win2016
+      vmImage: windows-2019
     matrix:
       py35-x86:
        PYTHON_VERSION: '3.5'
@@ -28,10 +28,9 @@
       py37-x64:
         PYTHON_VERSION: '3.7'
         PYTHON_ARCH: 'x64'
-      # Pending https://github.com/microsoft/azure-pipelines-image-generation/issues/1317
-      # py38-x86:
-      #   PYTHON_VERSION: '3.8'
-      #   PYTHON_ARCH: 'x86'
-      # py38-x64:
-      #   PYTHON_VERSION: '3.8'
-      #   PYTHON_ARCH: 'x64'
+      py38-x86:
+        PYTHON_VERSION: '3.8'
+        PYTHON_ARCH: 'x86'
+      py38-x64:
+        PYTHON_VERSION: '3.8'
+        PYTHON_ARCH: 'x64'

From 3b3131a7897fb7665e9e0428e9a0dac4a1667471 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?=
Date: Mon, 16 Sep 2019 09:20:48 -0400
Subject: [PATCH 339/689] BF: only apply affine to selected streamlines

---
 nibabel/streamlines/array_sequence.py         | 48 +++++++++++++++
 .../streamlines/tests/test_array_sequence.py  | 61 +++++++++++++++++++
 nibabel/streamlines/tests/test_streamlines.py | 13 ++++
 nibabel/streamlines/tests/test_tractogram.py  | 16 +++++
 nibabel/streamlines/tractogram.py             |  7 +--
 5 files changed, 140 insertions(+), 5 deletions(-)

diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py
index e86cbb5127..3a6ef84e62 100644
--- a/nibabel/streamlines/array_sequence.py
+++ b/nibabel/streamlines/array_sequence.py
@@ -329,6 +329,54 @@ def __getitem__(self, idx):
         raise TypeError("Index must be either an int, a slice, a list of int"
                         " or a ndarray of bool! Not " + str(type(idx)))
 
+    def __setitem__(self, idx, elements):
+        """ Set sequence(s) through standard or advanced numpy indexing.
+
+        Parameters
+        ----------
+        idx : int or slice or list or ndarray
+            If int, index of the element to overwrite.
+            If slice, use slicing to select the elements to overwrite.
+            If list, indices of the elements to overwrite.
+            If ndarray with dtype int, indices of the elements to overwrite.
+            If ndarray with dtype bool, only overwrite selected elements.
+        elements: ndarray or :class:`ArraySequence`
+            Data that will overwrite selected sequences.
+            If `idx` is an int, `elements` is expected to be a ndarray.
+            Otherwise, `elements` is expected to be an :class:`ArraySequence` object. 
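+
+        Examples
+        --------
+        A minimal sketch of the integer-index case (the array values here
+        are hypothetical, chosen only for illustration):
+
+        >>> import numpy as np
+        >>> seq = ArraySequence([np.zeros((2, 3)), np.zeros((4, 3))])
+        >>> seq[0] = np.ones((2, 3))  # overwrite the first sequence in place
+        >>> seq[0].sum()
+        6.0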
+ """ + if isinstance(idx, (numbers.Integral, np.integer)): + start = self._offsets[idx] + self._data[start:start + self._lengths[idx]] = elements + return + + if isinstance(idx, tuple): + off_idx = idx[0] + data = self._data.__getitem__((slice(None),) + idx[1:]) + else: + off_idx = idx + data = self._data + + if isinstance(off_idx, slice): # Standard list slicing + offsets = self._offsets[off_idx] + lengths = self._lengths[off_idx] + + elif isinstance(off_idx, list) or is_ndarray_of_int_or_bool(off_idx): + # Fancy indexing + offsets = self._offsets[off_idx] + lengths = self._lengths[off_idx] + + else: + raise TypeError("Index must be either an int, a slice, a list of int" + " or a ndarray of bool! Not " + str(type(idx))) + + if len(lengths) != elements.total_nb_rows: + msg = "Trying to set {} sequences with {} sequences." + raise TypeError(msg.format(len(lengths), elements.total_nb_rows)) + + for o1, l1, o2, l2 in zip(offsets, lengths, elements._offsets, elements._lengths): + data[o1:o1 + l1] = elements._data[o2:o2 + l2] + def __iter__(self): if len(self._lengths) != len(self._offsets): raise ValueError("ArraySequence object corrupted:" diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 33421f45c7..ea05eefb27 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -277,6 +277,67 @@ def test_arraysequence_getitem(self): check_arr_seq_view(seq_view, SEQ_DATA['seq']) check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) + def test_arraysequence_setitem(self): + # Set one item + seq = SEQ_DATA['seq'] * 0 + for i, e in enumerate(SEQ_DATA['seq']): + seq[i] = e + + check_arr_seq(seq, SEQ_DATA['seq']) + + if sys.version_info < (3,): + seq = ArraySequence(SEQ_DATA['seq'] * 0) + for i, e in enumerate(SEQ_DATA['seq']): + seq[long(i)] = e + + check_arr_seq(seq, SEQ_DATA['seq']) + + # Get all items using indexing (creates a view). + indices = list(range(len(SEQ_DATA['seq']))) + seq_view = SEQ_DATA['seq'][indices] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + # We took all elements so the view should match the original. + check_arr_seq(seq_view, SEQ_DATA['seq']) + + # Get multiple items using ndarray of dtype integer. + for dtype in [np.int8, np.int16, np.int32, np.int64]: + seq_view = SEQ_DATA['seq'][np.array(indices, dtype=dtype)] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + # We took all elements so the view should match the original. + check_arr_seq(seq_view, SEQ_DATA['seq']) + + # Get multiple items out of order (creates a view). + SEQ_DATA['rng'].shuffle(indices) + seq_view = SEQ_DATA['seq'][indices] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + check_arr_seq(seq_view, [SEQ_DATA['data'][i] for i in indices]) + + # Get slice (this will create a view). + seq_view = SEQ_DATA['seq'][::2] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + check_arr_seq(seq_view, SEQ_DATA['data'][::2]) + + # Use advanced indexing with ndarray of data type bool. + selection = np.array([False, True, True, False, True]) + seq_view = SEQ_DATA['seq'][selection] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + check_arr_seq(seq_view, + [SEQ_DATA['data'][i] + for i, keep in enumerate(selection) if keep]) + + # Test invalid indexing + assert_raises(TypeError, SEQ_DATA['seq'].__getitem__, 'abc') + + # Get specific columns. 
+ seq_view = SEQ_DATA['seq'][:, 2] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data']]) + + # Combining multiple slicing and indexing operations. + seq_view = SEQ_DATA['seq'][::-2][:, 2] + check_arr_seq_view(seq_view, SEQ_DATA['seq']) + check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) + def test_arraysequence_repr(self): # Test that calling repr on a ArraySequence object is not falling. repr(SEQ_DATA['seq']) diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 2f96e56843..2e537c63f2 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -267,6 +267,19 @@ def test_save_complex_file(self): tfile = nib.streamlines.load(filename, lazy_load=False) assert_tractogram_equal(tfile.tractogram, tractogram) + def test_save_sliced_tractogram(self): + tractogram = Tractogram(DATA['streamlines'], + affine_to_rasmm=np.eye(4)) + original_tractogram = tractogram.copy() + for ext, cls in FORMATS.items(): + with InTemporaryDirectory(): + filename = 'streamlines' + ext + nib.streamlines.save(tractogram[::2], filename) + tfile = nib.streamlines.load(filename, lazy_load=False) + assert_tractogram_equal(tfile.tractogram, tractogram[::2]) + # Make sure original tractogram hasn't changed. + assert_tractogram_equal(tractogram, original_tractogram) + def test_load_unknown_format(self): assert_raises(ValueError, nib.streamlines.load, "") diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 888de0bd49..4e143a4be9 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -688,6 +688,22 @@ def test_tractogram_apply_affine(self): np.dot(np.eye(4), np.dot(np.linalg.inv(affine), np.linalg.inv(affine)))) + # Applying the affine to a tractogram that has been indexed or sliced + # shouldn't affect the remaining streamlines. + tractogram = DATA['tractogram'].copy() + transformed_tractogram = tractogram[::2].apply_affine(affine) + assert_true(transformed_tractogram is not tractogram) + check_tractogram(tractogram[::2], + streamlines=[s*scaling for s in DATA['streamlines'][::2]], + data_per_streamline=DATA['tractogram'].data_per_streamline[::2], + data_per_point=DATA['tractogram'].data_per_point[::2]) + + # Remaining streamlines should match the original ones. + check_tractogram(tractogram[1::2], + streamlines=DATA['streamlines'][1::2], + data_per_streamline=DATA['tractogram'].data_per_streamline[1::2], + data_per_point=DATA['tractogram'].data_per_point[1::2]) + # Check that applying an affine and its inverse give us back the # original streamlines. tractogram = DATA['tractogram'].copy() diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index c6687b82aa..3d01d8426e 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -432,11 +432,8 @@ def apply_affine(self, affine, lazy=False): if np.all(affine == np.eye(4)): return self # No transformation. - BUFFER_SIZE = 10000000 # About 128 Mb since pts shape is 3. 
- for start in range(0, len(self.streamlines.data), BUFFER_SIZE): - end = start + BUFFER_SIZE - pts = self.streamlines._data[start:end] - self.streamlines.data[start:end] = apply_affine(affine, pts) + for i in range(len(self.streamlines)): + self.streamlines[i] = apply_affine(affine, self.streamlines[i]) if self.affine_to_rasmm is not None: # Update the affine that brings back the streamlines to RASmm. From 1642ba03b6c22fa42881c838aa023624e42d28f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= Date: Mon, 16 Sep 2019 09:22:22 -0400 Subject: [PATCH 340/689] NF: add support for Python operators to ArraySequence --- nibabel/streamlines/array_sequence.py | 77 +++++++++++++++++++ .../streamlines/tests/test_array_sequence.py | 61 +++++++++++++++ 2 files changed, 138 insertions(+) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 3a6ef84e62..20a3911d1e 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -53,6 +53,35 @@ def update_seq(self, arr_seq): arr_seq._lengths = np.array(self.lengths) +def _define_operators(cls): + """ Decorator which adds support for some Python operators. """ + def _wrap(cls, op, name=None, inplace=False, unary=False): + name = name or op + if unary: + setattr(cls, name, lambda self: self._op(op)) + else: + setattr(cls, name, + lambda self, value: self._op(op, value, inplace=inplace)) + + for op in ["__iadd__", "__isub__", "__imul__", "__idiv__", + "__ifloordiv__", "__itruediv__", "__ior__"]: + _wrap(cls, op, inplace=True) + + for op in ["__add__", "__sub__", "__mul__", "__div__", + "__floordiv__", "__truediv__", "__or__"]: + op_ = "__i{}__".format(op.strip("_")) + _wrap(cls, op_, name=op) + + for op in ["__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__"]: + _wrap(cls, op) + + for op in ["__neg__"]: + _wrap(cls, op, unary=True) + + return cls + + +@_define_operators class ArraySequence(object): """ Sequence of ndarrays having variable first dimension sizes. @@ -120,6 +149,23 @@ def data(self): """ Elements in this array sequence. """ return self._data + def _check_shape(self, arrseq): + """ Check whether this array sequence is compatible with another. """ + msg = "cannot perform operation - array sequences have different" + if len(self._lengths) != len(arrseq._lengths): + msg += " lengths: {} vs. {}." + raise ValueError(msg.format(len(self._lengths), len(arrseq._lengths))) + + if self.total_nb_rows != arrseq.total_nb_rows: + msg += " amount of data: {} vs. {}." + raise ValueError(msg.format(self.total_nb_rows, arrseq.total_nb_rows)) + + if self.common_shape != arrseq.common_shape: + msg += " common shape: {} vs. {}." + raise ValueError(msg.format(self.common_shape, arrseq.common_shape)) + + return True + def _get_next_offset(self): """ Offset in ``self._data`` at which to write next rowelement """ if len(self._offsets) == 0: @@ -377,6 +423,37 @@ def __setitem__(self, idx, elements): for o1, l1, o2, l2 in zip(offsets, lengths, elements._offsets, elements._lengths): data[o1:o1 + l1] = elements._data[o2:o2 + l2] + def _op(self, op, value=None, inplace=False): + """ Applies some operator to this arraysequence. + + This handles both unary and binary operators with a scalar or another + array sequence. Operations are performed directly on the underlying + data, or a copy of it, which depends on the value of `inplace`. + + Parameters + ---------- + op : str + Name of the Python operator (e.g., `"__add__"`). 
+ value : scalar or :class:`ArraySequence`, optional + If None, the operator is assumed to be unary. + Otherwise, that value is used in the binary operation. + inplace: bool, optional + If False, the operation is done on a copy of this array sequence. + Otherwise, this array sequence gets modified directly. + """ + seq = self if inplace else self.copy() + + if is_array_sequence(value) and seq._check_shape(value): + for o1, l1, o2, l2 in zip(seq._offsets, seq._lengths, value._offsets, value._lengths): + seq._data[o1:o1 + l1] = getattr(seq._data[o1:o1 + l1], op)(value._data[o2:o2 + l2]) + + else: + args = [] if value is None else [value] # Dealing with unary and binary ops. + for o1, l1 in zip(seq._offsets, seq._lengths): + seq._data[o1:o1 + l1] = getattr(seq._data[o1:o1 + l1], op)(*args) + + return seq + def __iter__(self): if len(self._lengths) != len(self._offsets): raise ValueError("ArraySequence object corrupted:" diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index ea05eefb27..84c23b6c38 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -277,6 +277,67 @@ def test_arraysequence_getitem(self): check_arr_seq_view(seq_view, SEQ_DATA['seq']) check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) + def test_arraysequence_operators(self): + for op in ["__add__", "__sub__", "__mul__", "__floordiv__", "__truediv__", + "__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__"]: + # Test math operators with a scalar. + for scalar in [42, 0.5, True]: + seq = getattr(SEQ_DATA['seq'], op)(scalar) + assert_true(seq is not SEQ_DATA['seq']) + check_arr_seq(seq, [getattr(d, op)(scalar) for d in SEQ_DATA['data']]) + + # Test math operators with another ArraySequence. + seq = getattr(SEQ_DATA['seq'], op)(SEQ_DATA['seq']) + assert_true(seq is not SEQ_DATA['seq']) + check_arr_seq(seq, [getattr(d, op)(d) for d in SEQ_DATA['data']]) + + # Test math operators with ArraySequence views. + orig = SEQ_DATA['seq'][::2] + seq = getattr(orig, op)(orig) + assert_true(seq is not orig) + check_arr_seq(seq, [getattr(d, op)(d) for d in SEQ_DATA['data'][::2]]) + + # Test in-place operators. + for op in ["__iadd__", "__isub__", "__imul__", "__ifloordiv__", "__itruediv__"]: + # Test in-place math operators with a scalar. + for scalar in [42, 0.5, True]: + seq = seq_orig = SEQ_DATA['seq'].copy() + seq = getattr(seq, op)(scalar) + assert_true(seq is seq_orig) + check_arr_seq(seq, [getattr(d.copy(), op)(scalar) for d in SEQ_DATA['data']]) + + # Test in-place math operators with another ArraySequence. + seq = seq_orig = SEQ_DATA['seq'].copy() + seq = getattr(seq, op)(SEQ_DATA['seq']) + assert_true(seq is seq_orig) + check_arr_seq(seq, [getattr(d.copy(), op)(d) for d in SEQ_DATA['data']]) + + # Test in-place math operators with ArraySequence views. + seq = seq_orig = SEQ_DATA['seq'].copy()[::2] + seq = getattr(seq, op)(seq) + assert_true(seq is seq_orig) + check_arr_seq(seq, [getattr(d.copy(), op)(d) for d in SEQ_DATA['data'][::2]]) + + # Operations between array sequences of different lengths. + seq = SEQ_DATA['seq'].copy() + assert_raises(ValueError, getattr(seq, op), SEQ_DATA['seq'][::2]) + + # Operations between array sequences with different amount of data. 
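+            # (Both sequences below hold 5 elements, but 10 vs. 15 points in
+            #  total, so the shape check is expected to raise ValueError.)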
+            seq1 = ArraySequence(np.arange(10).reshape(5, 2))
+            seq2 = ArraySequence(np.arange(15).reshape(5, 3))
+            assert_raises(ValueError, getattr(seq1, op), seq2)
+
+            # Operations between array sequences with different common shape.
+            seq1 = ArraySequence(np.arange(12).reshape(2, 2, 3))
+            seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2))
+            assert_raises(ValueError, getattr(seq1, op), seq2)
+
+        # Unary operators
+        for op in ["__neg__"]:
+            seq = getattr(SEQ_DATA['seq'], op)()
+            assert_true(seq is not SEQ_DATA['seq'])
+            check_arr_seq(seq, [getattr(d, op)() for d in SEQ_DATA['data']])
+
     def test_arraysequence_setitem(self):
         # Set one item
         seq = SEQ_DATA['seq'] * 0

From 53456316ec27edd0c59d4f3b08a2cc83902a02af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= 
Date: Mon, 16 Sep 2019 09:23:16 -0400
Subject: [PATCH 341/689] RF: make exposed ArraySequence.data return a copy

---
 nibabel/streamlines/array_sequence.py         | 18 +++++++++++++++++-
 .../streamlines/tests/test_array_sequence.py  |  9 +++++++++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py
index 20a3911d1e..c849902e30 100644
--- a/nibabel/streamlines/array_sequence.py
+++ b/nibabel/streamlines/array_sequence.py
@@ -1,5 +1,6 @@
 import numbers
+import warnings

 from operator import mul
 from functools import reduce
@@ -147,7 +148,22 @@ def total_nb_rows(self):
     @property
     def data(self):
         """ Elements in this array sequence. """
-        return self._data
+        warnings.warn("The 'ArraySequence.data' property has been deprecated"
+                      " in favor of 'ArraySequence.get_data()'.",
+                      DeprecationWarning,
+                      stacklevel=2)
+        return self.get_data()
+
+    def get_data(self):
+        """ Returns a copy of the elements in this array sequence.
+
+        Notes
+        -----
+        To modify the data on this array sequence, one can use in-place
+        mathematical operators (e.g., `seq += ...`) or use the assignment
+        operator (i.e., `seq[...] = value`).
+        """
+        return self.copy()._data

diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py
index 84c23b6c38..560bb3f165 100644
--- a/nibabel/streamlines/tests/test_array_sequence.py
+++ b/nibabel/streamlines/tests/test_array_sequence.py
@@ -441,6 +441,15 @@ def test_save_and_load_arraysequence(self):
         # Make sure we can add new elements to it.
         loaded_seq.append(SEQ_DATA['data'][0])

+    def test_get_data(self):
+        seq_view = SEQ_DATA['seq'][::2]
+        check_arr_seq_view(seq_view, SEQ_DATA['seq'])
+
+        # We make sure the array sequence data does not
+        # contain more elements than it is supposed to.
+        data = seq_view.get_data()
+        assert len(data) < len(seq_view._data)
+

 def test_concatenate():
     seq = SEQ_DATA['seq'].copy()  # In case there is in-place modification.

From 74008110e1a4df11952a0e0225bfa47f7bdcf2ef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= 
Date: Mon, 4 Nov 2019 14:37:25 -0500
Subject: [PATCH 342/689] Set arrseq dtype according to the arithmetic operators. Remove Python version checks. Refactor unit tests for arithmetic operators.
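
As an illustrative sketch of the intended behavior (not part of the
diff below; the names are only for demonstration), the result dtype now
follows NumPy's promotion rules for the wrapped operator:

    import numpy as np
    from nibabel.streamlines.array_sequence import ArraySequence

    seq_int = ArraySequence(np.arange(12, dtype=np.int32).reshape(3, 4))
    # Integer addition keeps the integer dtype ...
    assert (seq_int + 2).get_data().dtype == np.int32
    # ... while true division promotes to float64, as it would for ndarrays.
    assert (seq_int / 2).get_data().dtype == np.float64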
--- nibabel/streamlines/array_sequence.py | 58 +++++--- .../streamlines/tests/test_array_sequence.py | 137 +++++++++++------- nibabel/streamlines/tests/test_tractogram.py | 3 - 3 files changed, 123 insertions(+), 75 deletions(-) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index c849902e30..84dddb21d1 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -56,27 +56,29 @@ def update_seq(self, arr_seq): def _define_operators(cls): """ Decorator which adds support for some Python operators. """ - def _wrap(cls, op, name=None, inplace=False, unary=False): - name = name or op - if unary: - setattr(cls, name, lambda self: self._op(op)) - else: - setattr(cls, name, - lambda self, value: self._op(op, value, inplace=inplace)) + def _wrap(cls, op, inplace=False, unary=False): + + def fn_unary_op(self): + return self._op(op) - for op in ["__iadd__", "__isub__", "__imul__", "__idiv__", - "__ifloordiv__", "__itruediv__", "__ior__"]: - _wrap(cls, op, inplace=True) + def fn_binary_op(self, value): + return self._op(op, value, inplace=inplace) - for op in ["__add__", "__sub__", "__mul__", "__div__", - "__floordiv__", "__truediv__", "__or__"]: - op_ = "__i{}__".format(op.strip("_")) - _wrap(cls, op_, name=op) + setattr(cls, op, fn_unary_op if unary else fn_binary_op) + fn = getattr(cls, op) + fn.__name__ = op + fn.__doc__ = getattr(np.ndarray, op).__doc__ + + for op in ["__add__", "__sub__", "__mul__", "__mod__", "__pow__", + "__floordiv__", "__truediv__", "__lshift__", "__rshift__", + "__or__", "__and__", "__xor__"]: + _wrap(cls, op=op, inplace=False) + _wrap(cls, op="__i{}__".format(op.strip("_")), inplace=True) for op in ["__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__"]: _wrap(cls, op) - for op in ["__neg__"]: + for op in ["__neg__", "__abs__", "__invert__"]: _wrap(cls, op, unary=True) return cls @@ -460,13 +462,31 @@ def _op(self, op, value=None, inplace=False): seq = self if inplace else self.copy() if is_array_sequence(value) and seq._check_shape(value): - for o1, l1, o2, l2 in zip(seq._offsets, seq._lengths, value._offsets, value._lengths): - seq._data[o1:o1 + l1] = getattr(seq._data[o1:o1 + l1], op)(value._data[o2:o2 + l2]) + elements = zip(seq._offsets, seq._lengths, + self._offsets, self._lengths, + value._offsets, value._lengths) + + # Change seq.dtype to match the operation resulting type. + o0, l0, o1, l1, o2, l2 = next(elements) + tmp = getattr(self._data[o1:o1 + l1], op)(value._data[o2:o2 + l2]) + seq._data = seq._data.astype(tmp.dtype) + seq._data[o0:o0 + l0] = tmp + + for o0, l0, o1, l1, o2, l2 in elements: + seq._data[o0:o0 + l0] = getattr(self._data[o1:o1 + l1], op)(value._data[o2:o2 + l2]) else: args = [] if value is None else [value] # Dealing with unary and binary ops. - for o1, l1 in zip(seq._offsets, seq._lengths): - seq._data[o1:o1 + l1] = getattr(seq._data[o1:o1 + l1], op)(*args) + elements = zip(seq._offsets, seq._lengths, self._offsets, self._lengths) + + # Change seq.dtype to match the operation resulting type. 
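+            # (The first offset/length pair is computed eagerly so that the
+            #  result dtype is known; the data buffer is recast once, then the
+            #  remaining pairs are filled in the loop below.)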
+ o0, l0, o1, l1 = next(elements) + tmp = getattr(self._data[o1:o1 + l1], op)(*args) + seq._data = seq._data.astype(tmp.dtype) + seq._data[o0:o0 + l0] = tmp + + for o0, l0, o1, l1 in elements: + seq._data[o0:o0 + l0] = getattr(self._data[o1:o1 + l1], op)(*args) return seq diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 560bb3f165..e4db8eed32 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -24,7 +24,7 @@ def setup(): def generate_data(nb_arrays, common_shape, rng): - data = [rng.rand(*(rng.randint(3, 20),) + common_shape) + data = [rng.rand(*(rng.randint(3, 20),) + common_shape) * 100 for _ in range(nb_arrays)] return data @@ -228,9 +228,6 @@ def test_arraysequence_getitem(self): for i, e in enumerate(SEQ_DATA['seq']): assert_array_equal(SEQ_DATA['seq'][i], e) - if sys.version_info < (3,): - assert_array_equal(SEQ_DATA['seq'][long(i)], e) - # Get all items using indexing (creates a view). indices = list(range(len(SEQ_DATA['seq']))) seq_view = SEQ_DATA['seq'][indices] @@ -278,49 +275,42 @@ def test_arraysequence_getitem(self): check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) def test_arraysequence_operators(self): - for op in ["__add__", "__sub__", "__mul__", "__floordiv__", "__truediv__", - "__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__"]: - # Test math operators with a scalar. - for scalar in [42, 0.5, True]: - seq = getattr(SEQ_DATA['seq'], op)(scalar) - assert_true(seq is not SEQ_DATA['seq']) - check_arr_seq(seq, [getattr(d, op)(scalar) for d in SEQ_DATA['data']]) + # Disable division per zero warnings. + flags = np.seterr(divide='ignore', invalid='ignore') + SCALARS = [42, 0.5, True, -3, 0] + CMP_OPS = ["__eq__", "__ne__", "__lt__", "__le__", "__gt__", "__ge__"] - # Test math operators with another ArraySequence. - seq = getattr(SEQ_DATA['seq'], op)(SEQ_DATA['seq']) - assert_true(seq is not SEQ_DATA['seq']) - check_arr_seq(seq, [getattr(d, op)(d) for d in SEQ_DATA['data']]) + seq = SEQ_DATA['seq'].copy() + seq_int = SEQ_DATA['seq'].copy() + seq_int._data = seq_int._data.astype(int) + seq_bool = SEQ_DATA['seq'].copy() > 30 - # Test math operators with ArraySequence views. - orig = SEQ_DATA['seq'][::2] - seq = getattr(orig, op)(orig) + ARRSEQS = [seq, seq_int, seq_bool] + VIEWS = [seq[::2], seq_int[::2], seq_bool[::2]] + + def _test_unary(op, arrseq): + orig = arrseq.copy() + seq = getattr(orig, op)() assert_true(seq is not orig) - check_arr_seq(seq, [getattr(d, op)(d) for d in SEQ_DATA['data'][::2]]) - - # Test in-place operators. - for op in ["__iadd__", "__isub__", "__imul__", "__ifloordiv__", "__itruediv__"]: - # Test in-place math operators with a scalar. - for scalar in [42, 0.5, True]: - seq = seq_orig = SEQ_DATA['seq'].copy() - seq = getattr(seq, op)(scalar) - assert_true(seq is seq_orig) - check_arr_seq(seq, [getattr(d.copy(), op)(scalar) for d in SEQ_DATA['data']]) - - # Test in-place math operators with another ArraySequence. - seq = seq_orig = SEQ_DATA['seq'].copy() - seq = getattr(seq, op)(SEQ_DATA['seq']) - assert_true(seq is seq_orig) - check_arr_seq(seq, [getattr(d.copy(), op)(d) for d in SEQ_DATA['data']]) - - # Test in-place math operators with ArraySequence views. 
- seq = seq_orig = SEQ_DATA['seq'].copy()[::2] - seq = getattr(seq, op)(seq) - assert_true(seq is seq_orig) - check_arr_seq(seq, [getattr(d.copy(), op)(d) for d in SEQ_DATA['data'][::2]]) + check_arr_seq(seq, [getattr(d, op)() for d in orig]) + + def _test_binary(op, arrseq, scalars, seqs, inplace=False): + for scalar in scalars: + orig = arrseq.copy() + seq = getattr(orig, op)(scalar) + assert_true((seq is orig) if inplace else (seq is not orig)) + check_arr_seq(seq, [getattr(e, op)(scalar) for e in arrseq]) + + # Test math operators with another ArraySequence. + for other in seqs: + orig = arrseq.copy() + seq = getattr(orig, op)(other) + assert_true(seq is not SEQ_DATA['seq']) + check_arr_seq(seq, [getattr(e1, op)(e2) for e1, e2 in zip(arrseq, other)]) # Operations between array sequences of different lengths. - seq = SEQ_DATA['seq'].copy() - assert_raises(ValueError, getattr(seq, op), SEQ_DATA['seq'][::2]) + orig = arrseq.copy() + assert_raises(ValueError, getattr(orig, op), orig[::2]) # Operations between array sequences with different amount of data. seq1 = ArraySequence(np.arange(10).reshape(5, 2)) @@ -332,11 +322,59 @@ def test_arraysequence_operators(self): seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2)) assert_raises(ValueError, getattr(seq1, op), seq2) + + for op in ["__add__", "__sub__", "__mul__", "__mod__", + "__floordiv__", "__truediv__"] + CMP_OPS: + _test_binary(op, seq, SCALARS, ARRSEQS) + _test_binary(op, seq_int, SCALARS, ARRSEQS) + + # Test math operators with ArraySequence views. + _test_binary(op, seq[::2], SCALARS, VIEWS) + _test_binary(op, seq_int[::2], SCALARS, VIEWS) + + if op in CMP_OPS: + continue + + op = "__i{}__".format(op.strip("_")) + _test_binary(op, seq, SCALARS, ARRSEQS, inplace=True) + + if op == "__itruediv__": + continue # Going to deal with it separately. + + _test_binary(op, seq_int, [42, -3, True, 0], [seq_int, seq_bool, -seq_int], inplace=True) # int <-- int + assert_raises(TypeError, _test_binary, op, seq_int, [0.5], [], inplace=True) # int <-- float + assert_raises(TypeError, _test_binary, op, seq_int, [], [seq], inplace=True) # int <-- float + + # __pow__ : Integers to negative integer powers are not allowed. + _test_binary("__pow__", seq, [42, -3, True, 0], [seq_int, seq_bool, -seq_int]) + _test_binary("__ipow__", seq, [42, -3, True, 0], [seq_int, seq_bool, -seq_int], inplace=True) + assert_raises(ValueError, _test_binary, "__pow__", seq_int, [-3], []) + assert_raises(ValueError, _test_binary, "__ipow__", seq_int, [-3], [], inplace=True) + + # __itruediv__ is only valid with float arrseq. + for scalar in SCALARS + ARRSEQS: + assert_raises(TypeError, getattr(seq_int.copy(), "__itruediv__"), scalar) + + # Bitwise operators + for op in ("__lshift__", "__rshift__", "__or__", "__and__", "__xor__"): + _test_binary(op, seq_bool, [42, -3, True, 0], [seq_int, seq_bool, -seq_int]) + assert_raises(TypeError, _test_binary, op, seq_bool, [0.5], []) + assert_raises(TypeError, _test_binary, op, seq, [], [seq]) + # Unary operators - for op in ["__neg__"]: - seq = getattr(SEQ_DATA['seq'], op)() - assert_true(seq is not SEQ_DATA['seq']) - check_arr_seq(seq, [getattr(d, op)() for d in SEQ_DATA['data']]) + for op in ["__neg__", "__abs__"]: + _test_unary(op, seq) + _test_unary(op, -seq) + _test_unary(op, seq_int) + _test_unary(op, -seq_int) + + _test_unary("__abs__", seq_bool) + _test_unary("__invert__", seq_bool) + assert_raises(TypeError, _test_unary, "__invert__", seq) + + # Restore flags. 
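+        # (np.seterr returns the previous error settings, captured above in
+        #  `flags`, so the global state is left as it was found.)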
+ np.seterr(**flags) + def test_arraysequence_setitem(self): # Set one item @@ -346,13 +384,6 @@ def test_arraysequence_setitem(self): check_arr_seq(seq, SEQ_DATA['seq']) - if sys.version_info < (3,): - seq = ArraySequence(SEQ_DATA['seq'] * 0) - for i, e in enumerate(SEQ_DATA['seq']): - seq[long(i)] = e - - check_arr_seq(seq, SEQ_DATA['seq']) - # Get all items using indexing (creates a view). indices = list(range(len(SEQ_DATA['seq']))) seq_view = SEQ_DATA['seq'][indices] diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 4e143a4be9..407f3ef413 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -539,9 +539,6 @@ def test_tractogram_getitem(self): for i, t in enumerate(DATA['tractogram']): assert_tractogram_item_equal(DATA['tractogram'][i], t) - if sys.version_info < (3,): - assert_tractogram_item_equal(DATA['tractogram'][long(i)], t) - # Get one TractogramItem out of two. tractogram_view = DATA['simple_tractogram'][::2] check_tractogram(tractogram_view, DATA['streamlines'][::2]) From 1b1f516227d36a42a94e21344bfc9039635c7153 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= Date: Mon, 4 Nov 2019 23:34:52 -0500 Subject: [PATCH 343/689] Setitem method for ArraySequence --- nibabel/streamlines/array_sequence.py | 39 +++++-- .../streamlines/tests/test_array_sequence.py | 105 +++++++++--------- 2 files changed, 77 insertions(+), 67 deletions(-) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 84dddb21d1..06991fae9e 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -6,6 +6,9 @@ import numpy as np +from nibabel.deprecated import deprecate_with_version + + MEGABYTE = 1024 * 1024 @@ -148,16 +151,15 @@ def total_nb_rows(self): return np.sum(self._lengths) @property + @deprecate_with_version("'ArraySequence.data' property is deprecated.\n" + "Please use the 'ArraySequence.get_data()' method instead", + '3.0', '4.0') def data(self): """ Elements in this array sequence. """ - warnings.warn("The 'ArraySequence.data' property has been deprecated" - " in favor of 'ArraySequence.get_data()'.", - DeprecationWarning, - stacklevel=2) return self.get_data() def get_data(self): - """ Returns a copy of the elements in this array sequence. + """ Returns a *copy* of the elements in this array sequence. Notes ----- @@ -384,7 +386,7 @@ def __getitem__(self, idx): seq._lengths = self._lengths[off_idx] return seq - if isinstance(off_idx, list) or is_ndarray_of_int_or_bool(off_idx): + if isinstance(off_idx, (list, range)) or is_ndarray_of_int_or_bool(off_idx): # Fancy indexing seq._offsets = self._offsets[off_idx] seq._lengths = self._lengths[off_idx] @@ -425,7 +427,7 @@ def __setitem__(self, idx, elements): offsets = self._offsets[off_idx] lengths = self._lengths[off_idx] - elif isinstance(off_idx, list) or is_ndarray_of_int_or_bool(off_idx): + elif isinstance(off_idx, (list, range)) or is_ndarray_of_int_or_bool(off_idx): # Fancy indexing offsets = self._offsets[off_idx] lengths = self._lengths[off_idx] @@ -434,12 +436,25 @@ def __setitem__(self, idx, elements): raise TypeError("Index must be either an int, a slice, a list of int" " or a ndarray of bool! Not " + str(type(idx))) - if len(lengths) != elements.total_nb_rows: - msg = "Trying to set {} sequences with {} sequences." 
- raise TypeError(msg.format(len(lengths), elements.total_nb_rows)) + if is_array_sequence(elements): + if len(lengths) != len(elements): + msg = "Trying to set {} sequences with {} sequences." + raise ValueError(msg.format(len(lengths), len(elements))) + + if sum(lengths) != elements.total_nb_rows: + msg = "Trying to set {} points with {} points." + raise ValueError(msg.format(sum(lengths), elements.total_nb_rows)) + + for o1, l1, o2, l2 in zip(offsets, lengths, elements._offsets, elements._lengths): + data[o1:o1 + l1] = elements._data[o2:o2 + l2] + + elif isinstance(elements, numbers.Number): + for o1, l1 in zip(offsets, lengths): + data[o1:o1 + l1] = elements - for o1, l1, o2, l2 in zip(offsets, lengths, elements._offsets, elements._lengths): - data[o1:o1 + l1] = elements._data[o2:o2 + l2] + else: # Try to iterate over it. + for o1, l1, element in zip(offsets, lengths, elements): + data[o1:o1 + l1] = element def _op(self, op, value=None, inplace=False): """ Applies some operator to this arraysequence. diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index e4db8eed32..c92580accb 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -274,6 +274,56 @@ def test_arraysequence_getitem(self): check_arr_seq_view(seq_view, SEQ_DATA['seq']) check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) + def test_arraysequence_setitem(self): + # Set one item + seq = SEQ_DATA['seq'] * 0 + for i, e in enumerate(SEQ_DATA['seq']): + seq[i] = e + + check_arr_seq(seq, SEQ_DATA['seq']) + + # Setitem with a scalar. + seq = SEQ_DATA['seq'].copy() + seq[:] = 0 + assert_true(seq._data.sum() == 0) + + # Setitem with a list of ndarray. + seq = SEQ_DATA['seq'] * 0 + seq[:] = SEQ_DATA['data'] + check_arr_seq(seq, SEQ_DATA['data']) + + # Setitem using tuple indexing. + seq = ArraySequence(np.arange(900).reshape((50,6,3))) + seq[:, 0] = 0 + assert_true(seq._data[:, 0].sum() == 0) + + # Setitem using tuple indexing. + seq = ArraySequence(np.arange(900).reshape((50,6,3))) + seq[range(len(seq))] = 0 + assert_true(seq._data.sum() == 0) + + # Setitem of a slice using another slice. + seq = ArraySequence(np.arange(900).reshape((50,6,3))) + seq[0:4] = seq[5:9] + check_arr_seq(seq[0:4], seq[5:9]) + + # Setitem between array sequences with different number of sequences. + seq = ArraySequence(np.arange(900).reshape((50,6,3))) + assert_raises(ValueError, seq.__setitem__, slice(0, 4), seq[5:10]) + + # Setitem between array sequences with different amount of points. + seq1 = ArraySequence(np.arange(10).reshape(5, 2)) + seq2 = ArraySequence(np.arange(15).reshape(5, 3)) + assert_raises(ValueError, seq1.__setitem__, slice(0, 5), seq2) + + # Setitem between array sequences with different common shape. + seq1 = ArraySequence(np.arange(12).reshape(2, 2, 3)) + seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2)) + assert_raises(ValueError, seq1.__setitem__, slice(0, 2), seq2) + + # Invalid index. + assert_raises(TypeError, seq.__setitem__, object(), None) + def test_arraysequence_operators(self): # Disable division per zero warnings. flags = np.seterr(divide='ignore', invalid='ignore') @@ -375,61 +425,6 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): # Restore flags. 
np.seterr(**flags) - - def test_arraysequence_setitem(self): - # Set one item - seq = SEQ_DATA['seq'] * 0 - for i, e in enumerate(SEQ_DATA['seq']): - seq[i] = e - - check_arr_seq(seq, SEQ_DATA['seq']) - - # Get all items using indexing (creates a view). - indices = list(range(len(SEQ_DATA['seq']))) - seq_view = SEQ_DATA['seq'][indices] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - # We took all elements so the view should match the original. - check_arr_seq(seq_view, SEQ_DATA['seq']) - - # Get multiple items using ndarray of dtype integer. - for dtype in [np.int8, np.int16, np.int32, np.int64]: - seq_view = SEQ_DATA['seq'][np.array(indices, dtype=dtype)] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - # We took all elements so the view should match the original. - check_arr_seq(seq_view, SEQ_DATA['seq']) - - # Get multiple items out of order (creates a view). - SEQ_DATA['rng'].shuffle(indices) - seq_view = SEQ_DATA['seq'][indices] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - check_arr_seq(seq_view, [SEQ_DATA['data'][i] for i in indices]) - - # Get slice (this will create a view). - seq_view = SEQ_DATA['seq'][::2] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - check_arr_seq(seq_view, SEQ_DATA['data'][::2]) - - # Use advanced indexing with ndarray of data type bool. - selection = np.array([False, True, True, False, True]) - seq_view = SEQ_DATA['seq'][selection] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - check_arr_seq(seq_view, - [SEQ_DATA['data'][i] - for i, keep in enumerate(selection) if keep]) - - # Test invalid indexing - assert_raises(TypeError, SEQ_DATA['seq'].__getitem__, 'abc') - - # Get specific columns. - seq_view = SEQ_DATA['seq'][:, 2] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data']]) - - # Combining multiple slicing and indexing operations. - seq_view = SEQ_DATA['seq'][::-2][:, 2] - check_arr_seq_view(seq_view, SEQ_DATA['seq']) - check_arr_seq(seq_view, [d[:, 2] for d in SEQ_DATA['data'][::-2]]) - def test_arraysequence_repr(self): # Test that calling repr on a ArraySequence object is not falling. repr(SEQ_DATA['seq']) From ea3e60707ff5711a0a93ed502d67c596268cc99e Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 8 Nov 2019 09:23:39 -0500 Subject: [PATCH 344/689] NF: Enable data scaling within the target dtype --- nibabel/arrayproxy.py | 85 +++++++++++++++++++++++++++++---------- nibabel/dataobj_images.py | 10 ++++- 2 files changed, 72 insertions(+), 23 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 79e5d87c10..9be4c4b469 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -33,7 +33,7 @@ from .deprecated import deprecate_with_version from .volumeutils import array_from_file, apply_read_scaling -from .fileslice import fileslice +from .fileslice import fileslice, canonical_slicers from .keywordonly import kw_only_meth from . 
import openers
@@ -336,36 +336,77 @@ def _get_fileobj(self):
                 self.file_like, keep_open=False) as opener:
             yield opener

-    def get_unscaled(self):
-        """ Read of data from file
-
-        This is an optional part of the proxy API
-        """
-        with self._get_fileobj() as fileobj, self._lock:
-            raw_data = array_from_file(self._shape,
+    def _get_unscaled(self, slicer):
+        if canonical_slicers(slicer, self._shape, False) == \
+                canonical_slicers((), self._shape, False):
+            with self._get_fileobj() as fileobj, self._lock:
+                return array_from_file(self._shape,
                                        self._dtype,
                                        fileobj,
                                        offset=self._offset,
                                        order=self.order,
                                        mmap=self._mmap)
-        return raw_data
+        with self._get_fileobj() as fileobj:
+            return fileslice(fileobj,
+                             slicer,
+                             self._shape,
+                             self._dtype,
+                             self._offset,
+                             order=self.order,
+                             lock=self._lock)
+
+    def _get_scaled(self, dtype, slicer):
+        # Ensure scale factors have dtypes
+        scl_slope = np.asanyarray(self._slope)
+        scl_inter = np.asanyarray(self._inter)
+        if dtype is None:
+            dtype = scl_slope.dtype
+        slope = scl_slope.astype(dtype)
+        inter = scl_inter.astype(dtype)
+        # Read array
+        raw_data = self._get_unscaled(slicer=slicer)
+        # Upcast as necessary for big slopes, intercepts
+        return apply_read_scaling(raw_data, slope, inter)
+
+    def get_unscaled(self):
+        """ Read data from file
+
+        This is an optional part of the proxy API
+        """
+        return self._get_unscaled(slicer=())
+
+    def get_scaled(self, dtype=None):
+        """ Read data from file and apply scaling
+
+        The dtype of the returned array is the narrowest dtype that can
+        represent the data without overflow, and is at least as wide as
+        the dtype parameter.
+
+        If dtype is unspecified, it is the wider of the dtypes of the slope
+        or intercept. This will generally be determined by the parameter
+        size in the image header, and so should be consistent for a given
+        image format, but may vary across formats. Notably, these factors
+        are single-precision (32-bit) floats for NIfTI-1 and double-precision
+        (64-bit) floats for NIfTI-2.
+
+        Parameters
+        ----------
+        dtype : numpy dtype specifier
+            A numpy dtype specifier specifying the narrowest acceptable
+            dtype.
+
+        Returns
+        -------
+        array
+            Scaled image data of data type `dtype`.
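+
+        Examples
+        --------
+        An illustrative sketch, not part of the original patch: the file
+        name is hypothetical and the returned dtype depends on the image's
+        on-disk scale factors.
+
+        >>> import nibabel as nib
+        >>> proxy = nib.load('scaled_image.nii').dataobj       # doctest: +SKIP
+        >>> proxy.get_scaled().dtype                           # doctest: +SKIP
+        dtype('float32')
+        >>> proxy.get_scaled(dtype=np.float64).dtype           # doctest: +SKIP
+        dtype('float64')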
+ """ + return self._get_scaled(dtype=dtype, slicer=()) def __array__(self): - # Read array and scale - raw_data = self.get_unscaled() - return apply_read_scaling(raw_data, self._slope, self._inter) + return self._get_scaled(dtype=None, slicer=()) def __getitem__(self, slicer): - with self._get_fileobj() as fileobj: - raw_data = fileslice(fileobj, - slicer, - self._shape, - self._dtype, - self._offset, - order=self.order, - lock=self._lock) - # Upcast as necessary for big slopes, intercepts - return apply_read_scaling(raw_data, self._slope, self._inter) + return self._get_scaled(dtype=None, slicer=slicer) def reshape(self, shape): """ Return an ArrayProxy with a new shape, without modifying data """ diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 3c0558f43e..dd4c853537 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -10,6 +10,7 @@ import numpy as np +from .arrayproxy import is_proxy from .filebasedimages import FileBasedImage from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version @@ -350,7 +351,14 @@ def get_fdata(self, caching='fill', dtype=np.float64): if self._fdata_cache is not None: if self._fdata_cache.dtype.type == dtype.type: return self._fdata_cache - data = np.asanyarray(self._dataobj).astype(dtype, copy=False) + dataobj = self._dataobj + # Attempt to confine data array to dtype during scaling + # On overflow, may still upcast + if is_proxy(dataobj): + dataobj = dataobj.get_scaled(dtype=dtype) + # Always return requested data type + # For array proxies, will only copy on overflow + data = np.asanyarray(dataobj, dtype=dtype) if caching == 'fill': self._fdata_cache = data return data From 2018e94b69c8f430c253b0fd376af4a2b110453b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 8 Nov 2019 10:47:32 -0500 Subject: [PATCH 345/689] RF: Implement AFNIArrayProxy._get_scaled(), replacing other overrides --- nibabel/brikhead.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 3ddfd2e8ad..cd4cc63f1b 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -262,19 +262,20 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None): def scaling(self): return self._scaling - def __array__(self): - raw_data = self.get_unscaled() - # datatype may change if applying self._scaling - return raw_data if self.scaling is None else raw_data * self.scaling - - def __getitem__(self, slicer): - raw_data = super(AFNIArrayProxy, self).__getitem__(slicer) - # apply volume specific scaling (may change datatype!) - if self.scaling is not None: - fake_data = strided_scalar(self._shape) - _, scaling = np.broadcast_arrays(fake_data, self.scaling) - raw_data = raw_data * scaling[slicer] - return raw_data + def _get_scaled(self, dtype, slicer): + raw_data = self._get_unscaled(slicer=slicer) + if self.scaling is None: + if dtype is None or raw_data.dtype >= np.dtype(dtype): + return raw_data + return np.asanyarray(raw_data, dtype=dtype) + + if dtype is None: + dtype = self.scaling.dtype + + fake_data = strided_scalar(self._shape) + _, scaling = np.broadcast_arrays(fake_data, self.scaling.astype(dtype)) + + return raw_data * scaling[slicer] class AFNIHeader(SpatialHeader): From 16e35522b4ac6d7d5eb953c5a0863d00d51adf26 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 8 Nov 2019 12:07:50 -0500 Subject: [PATCH 346/689] ENH: Add get_scaled to Minc, Ecat --- nibabel/ecat.py | 6 ++++++ nibabel/minc1.py | 13 +++++++++++-- nibabel/tests/test_spatialimages.py | 4 ++-- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index f3a7f1736c..a0500ff910 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -704,6 +704,12 @@ def __array__(self): frame_mapping[i][0]) return data + def get_scaled(self, dtype=None): + data = self.__array__() + if dtype is not None and np.dtype(dtype) > data.dtype: + data = data.astype(dtype) + return data + def __getitem__(self, sliceobj): """ Return slice `sliceobj` from ECAT data, optimizing if possible """ diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 1ca25eaf9c..d276f24074 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -261,13 +261,22 @@ def ndim(self): def is_proxy(self): return True + def _get_scaled(self, dtype, slicer): + data = self.minc_file.get_scaled_data(slicer) + if dtype is not None and np.dtype(dtype) > data.dtype: + data = data.astype(dtype) + return data + + def get_scaled(self, dtype=None): + return self._get_scaled(dtype=dtype, slicer=()) + def __array__(self): ''' Read of data from file ''' - return self.minc_file.get_scaled_data() + return self._get_scaled(dtype=None, slicer=()) def __getitem__(self, sliceobj): """ Read slice `sliceobj` of data from file """ - return self.minc_file.get_scaled_data(sliceobj) + return self._get_scaled(dtype=None, slicer=sliceobj) class MincHeader(SpatialHeader): diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 54633c9820..7d275e3366 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -195,8 +195,8 @@ class DataLike(object): # Minimal class implementing 'data' API shape = (3,) - def __array__(self): - return np.arange(3, dtype=np.int16) + def __array__(self, dtype='int16'): + return np.arange(3, dtype=dtype) class TestSpatialImage(TestCase): From dfebe58b84a53e75e2b3a677f86e9d10b3b6ed5e Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 8 Nov 2019 13:52:54 -0500 Subject: [PATCH 347/689] RF: Rework PARRECArrayProxy --- nibabel/parrec.py | 49 +++++++++++++++++++++++++++++++---------------- 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 8ed9adf2c5..9e86bd313f 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -639,32 +639,49 @@ def get_unscaled(self): self._slice_indices, self._shape, mmap=self._mmap) - def __array__(self): - with ImageOpener(self.file_like) as fileobj: - return _data_from_rec(fileobj, - self._rec_shape, - self._dtype, - self._slice_indices, - self._shape, - scalings=self._slice_scaling, - mmap=self._mmap) - - def __getitem__(self, slicer): + def _get_unscaled(self, slicer): indices = self._slice_indices - if indices[0] != 0 or np.any(np.diff(indices) != 1): + if slicer == (): + with ImageOpener(self.file_like) as fileobj: + rec_data = array_from_file(self._rec_shape, self._dtype, fileobj, mmap=self._mmap) + rec_data = rec_data[..., indices] + return rec_data.reshape(self._shape, order='F') + elif indices[0] != 0 or np.any(np.diff(indices) != 1): # We can't load direct from REC file, use inefficient slicing - return np.asanyarray(self)[slicer] + return self._get_unscaled(())[slicer] + # Slices all sequential from zero, can use fileslice # This gives more efficient volume by volume loading, for example with ImageOpener(self.file_like) as fileobj: - raw_data = fileslice(fileobj, slicer, self._shape, self._dtype, 0, - 'F') + return fileslice(fileobj, slicer, self._shape, self._dtype, 0, 'F') + + def _get_scaled(self, dtype, slicer): + raw_data = self._get_unscaled(slicer) + if self._slice_scaling is None: + if dtype is None or raw_data.dtype >= np.dtype(dtype): + return raw_data + return np.asanyarray(raw_data, dtype=dtype) + # Broadcast scaling to shape of original data slopes, inters = self._slice_scaling fake_data = strided_scalar(self._shape) _, slopes, inters = np.broadcast_arrays(fake_data, slopes, inters) + + if dtype is None: + dtype = max(slopes.dtype, inters.dtype) + # Slice scaling to give output shape - return raw_data * slopes[slicer] + inters[slicer] + return raw_data * slopes.astype(dtype)[slicer] + inters.astype(dtype)[slicer] + + + def get_scaled(self, dtype=None): + return self._get_scaled(dtype=dtype, slicer=()) + + def __array__(self): + return self._get_scaled(dtype=None, slicer=()) + + def __getitem__(self, slicer): + return self._get_scaled(dtype=None, slicer=slicer) class PARRECHeader(SpatialHeader): From 5577eb86a5a39c5672395235f6236ebe2c5b6698 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Fri, 8 Nov 2019 14:24:42 -0500 Subject: [PATCH 348/689] TEST: Revise equality check to approximate get_fdata(dtype=float32) and get_fdata(dtype=float64).astype(float32) are no longer equivalent --- nibabel/tests/test_image_api.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 3b921b9fb9..f571311a7e 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -44,7 +44,7 @@ from nose import SkipTest from nose.tools import (assert_true, assert_false, assert_raises, assert_equal) -from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns +from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose from ..testing import clear_and_catch_warnings from ..tmpdirs import InTemporaryDirectory @@ -314,18 +314,18 @@ def _check_proxy_interface(self, imaker, meth_name): # New data dtype, no caching, doesn't use or alter cache fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4') # We get back the original read, not the modified cache - assert_array_equal(fdata_new_dt, proxy_data.astype('f4')) + assert_allclose(fdata_new_dt, proxy_data.astype('f4'), rtol=1e-05, atol=1e-08) assert_equal(fdata_new_dt.dtype, np.float32) # The original cache stays in place, for default float64 assert_array_equal(img.get_fdata(), 42) # And for not-default float32, because we haven't cached fdata_new_dt[:] = 43 fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4') - assert_array_equal(fdata_new_dt, proxy_data.astype('f4')) + assert_allclose(fdata_new_dt, proxy_data.astype('f4'), rtol=1e-05, atol=1e-08) # Until we reset with caching='fill', at which point we # drop the original float64 cache, and have a float32 cache fdata_new_dt = img.get_fdata(caching='fill', dtype='f4') - assert_array_equal(fdata_new_dt, proxy_data.astype('f4')) + assert_allclose(fdata_new_dt, proxy_data.astype('f4'), rtol=1e-05, atol=1e-08) # We're using the cache, for dtype='f4' reads fdata_new_dt[:] = 43 assert_array_equal(img.get_fdata(dtype='f4'), 43) From 1ab8d581ce2dc5fa9b96614be035854ed707c46e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= Date: Fri, 8 Nov 2019 10:16:34 -0500 Subject: [PATCH 349/689] Keep old ArraySequence.data behavior but make the view read-only. --- nibabel/streamlines/array_sequence.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 06991fae9e..6ea869bda7 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -1,6 +1,5 @@ import numbers -import warnings from operator import mul from functools import reduce @@ -156,7 +155,9 @@ def total_nb_rows(self): '3.0', '4.0') def data(self): """ Elements in this array sequence. """ - return self.get_data() + view = self._data.view() + view.setflags(write=False) + return view def get_data(self): """ Returns a *copy* of the elements in this array sequence. From 7fba8d9a69e3fa129275a9c401ad3b06b191e1ca Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 9 Nov 2019 17:30:52 -0500 Subject: [PATCH 350/689] CI: Enable most dependencies for Python 3.8 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 2166ae05e9..c5cdc7e767 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,7 @@ matrix: # Basic dependencies only - python: 3.8 env: - - DEPENDS="-r requirements.txt" + - DEPENDS="numpy scipy pillow pydicom indexed_gzip" # Basic dependencies only - python: 3.5 env: From 785e9e2fd79eda53835df45844b875ac8252346a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 9 Nov 2019 17:43:42 -0500 Subject: [PATCH 351/689] CI: Test with matplotlib~=3.2.0rc1 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c5cdc7e767..1b81ba296c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,7 @@ matrix: # Basic dependencies only - python: 3.8 env: - - DEPENDS="numpy scipy pillow pydicom indexed_gzip" + - DEPENDS="numpy scipy matplotlib~=3.2.0rc1 pillow pydicom indexed_gzip" # Basic dependencies only - python: 3.5 env: From ac52ad826dfc0cf07512b074577d8240aeb3701d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 10 Nov 2019 13:44:41 -0500 Subject: [PATCH 352/689] TEST: Validate get_scaled with several float types --- nibabel/tests/test_proxy_api.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 494ab4b556..b29e7789cc 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -52,7 +52,7 @@ from nose import SkipTest from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) + assert_equal, assert_not_equal, assert_greater_equal) from numpy.testing import (assert_almost_equal, assert_array_equal) @@ -131,6 +131,22 @@ def validate_asarray(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) + def validate_get_scaled(self, pmaker, params): + # Check proxy returns expected array from asarray + prox, fio, hdr = pmaker() + out = prox.get_scaled() + assert_array_equal(out, params['arr_out']) + assert_dt_equal(out.dtype, params['dtype_out']) + # Shape matches expected shape + assert_equal(out.shape, params['shape']) + + for dtype in (np.float16, np.float32, np.float64, np.float128): + out = prox.get_scaled(dtype=dtype) + assert_almost_equal(out, params['arr_out']) + assert_greater_equal(out.dtype, np.dtype(dtype)) + # Shape matches expected shape + assert_equal(out.shape, params['shape']) + def validate_header_isolated(self, pmaker, params): # Confirm altering input header has no effect # Depends on header providing 'get_data_dtype', 'set_data_dtype', From 066c32a5edd7f28e330cfb54fefa5b75f3ccf7f4 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 10 Nov 2019 14:22:02 -0500 Subject: [PATCH 353/689] FIX: Better type promotion --- nibabel/arrayproxy.py | 13 ++++++++----- nibabel/parrec.py | 19 ++++++++++++++----- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 9be4c4b469..3710bc6832 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -359,14 +359,17 @@ def _get_scaled(self, dtype, slicer): # Ensure scale factors have dtypes scl_slope = np.asanyarray(self._slope) scl_inter = np.asanyarray(self._inter) - if dtype is None: - dtype = scl_slope.dtype - slope = scl_slope.astype(dtype) - inter = scl_inter.astype(dtype) + use_dtype = scl_slope.dtype if dtype is None else dtype + slope = scl_slope.astype(use_dtype) + inter = scl_inter.astype(use_dtype) # Read array raw_data = self._get_unscaled(slicer=slicer) # Upcast as necessary for big slopes, intercepts - return apply_read_scaling(raw_data, slope, inter) + scaled = apply_read_scaling(raw_data, slope, inter) + del raw_data + if dtype is not None: + scaled = scaled.astype(np.promote_types(scaled.dtype, dtype), copy=False) + return scaled def get_unscaled(self): """ Read data from file diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 9e86bd313f..279016f0ef 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -658,20 +658,29 @@ def _get_unscaled(self, slicer): def _get_scaled(self, dtype, slicer): raw_data = self._get_unscaled(slicer) if self._slice_scaling is None: - if dtype is None or raw_data.dtype >= np.dtype(dtype): + if dtype is None: return raw_data - return np.asanyarray(raw_data, dtype=dtype) + final_type = np.promote_types(raw_data.dtype, dtype) + return raw_data.astype(final_type, copy=False) # Broadcast scaling to shape of original data slopes, inters = self._slice_scaling fake_data = strided_scalar(self._shape) _, slopes, inters = np.broadcast_arrays(fake_data, slopes, inters) - if dtype is None: - dtype = max(slopes.dtype, inters.dtype) + final_type = np.result_type(raw_data, slopes, inters) + if dtype is not None: + final_type = np.promote_types(final_type, dtype) + + slopes = slopes.astype(final_type) + inters = inters.astype(final_type) + + if slicer is not None: + slopes = slopes[slicer] + inters = inters[slicer] # Slice scaling to give output shape - return raw_data * slopes.astype(dtype)[slicer] + inters.astype(dtype)[slicer] + return raw_data * slopes + inters def get_scaled(self, dtype=None): From c0237f1ebd4bd9ade039865156f440930be0568c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 10 Nov 2019 14:56:08 -0500 Subject: [PATCH 354/689] TEST: Use np.sctypes to detect available floats --- nibabel/tests/test_proxy_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index b29e7789cc..f79f78fef2 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -140,7 +140,7 @@ def validate_get_scaled(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) - for dtype in (np.float16, np.float32, np.float64, np.float128): + for dtype in np.sctypes['float']: out = prox.get_scaled(dtype=dtype) assert_almost_equal(out, params['arr_out']) assert_greater_equal(out.dtype, np.dtype(dtype)) From 29a68dd867be2fc698ad246ee3a25c1c67399255 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 10 Nov 2019 15:10:58 -0500 Subject: [PATCH 355/689] RF: Uniformize AFNI/PARREC scaling --- nibabel/brikhead.py | 18 +++++++++++------- nibabel/parrec.py | 12 ++---------- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index cd4cc63f1b..7ef1386872 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -265,17 +265,21 @@ def scaling(self): def _get_scaled(self, dtype, slicer): raw_data = self._get_unscaled(slicer=slicer) if self.scaling is None: - if dtype is None or raw_data.dtype >= np.dtype(dtype): + if dtype is None: return raw_data - return np.asanyarray(raw_data, dtype=dtype) - - if dtype is None: - dtype = self.scaling.dtype + final_type = np.promote_types(raw_data.dtype, dtype) + return raw_data.astype(final_type, copy=False) + # Broadcast scaling to shape of original data fake_data = strided_scalar(self._shape) - _, scaling = np.broadcast_arrays(fake_data, self.scaling.astype(dtype)) + _, scaling = np.broadcast_arrays(fake_data, self.scaling) + + final_type = np.result_type(raw_data, scaling) + if dtype is not None: + final_type = np.promote_types(final_type, dtype) - return raw_data * scaling[slicer] + # Slice scaling to give output shape + return raw_data * scaling[slicer].astype(final_type) class AFNIHeader(SpatialHeader): diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 279016f0ef..8c10fb1ec8 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -664,23 +664,15 @@ def _get_scaled(self, dtype, slicer): return raw_data.astype(final_type, copy=False) # Broadcast scaling to shape of original data - slopes, inters = self._slice_scaling fake_data = strided_scalar(self._shape) - _, slopes, inters = np.broadcast_arrays(fake_data, slopes, inters) + _, slopes, inters = np.broadcast_arrays(fake_data, *self._slice_scaling) final_type = np.result_type(raw_data, slopes, inters) if dtype is not None: final_type = np.promote_types(final_type, dtype) - slopes = slopes.astype(final_type) - inters = inters.astype(final_type) - - if slicer is not None: - slopes = slopes[slicer] - inters = inters[slicer] - # Slice scaling to give output shape - return raw_data * slopes + inters + return raw_data * slopes[slicer].astype(final_type) + inters[slicer].astype(final_type) def get_scaled(self, dtype=None): From 54e87bb7c43b488bc85690fc2aa1abd4f46adcc1 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 10 Nov 2019 15:50:41 -0500 Subject: [PATCH 356/689] RF: Use consistent logic for ECAT, Minc1 and PARREC proxies --- nibabel/ecat.py | 7 ++++--- nibabel/minc1.py | 7 ++++--- nibabel/parrec.py | 9 +++------ 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index a0500ff910..cc3345292e 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -706,9 +706,10 @@ def __array__(self): def get_scaled(self, dtype=None): data = self.__array__() - if dtype is not None and np.dtype(dtype) > data.dtype: - data = data.astype(dtype) - return data + if dtype is None: + return data + final_type = np.promote_types(data.dtype, dtype) + return data.astype(final_type, copy=False) def __getitem__(self, sliceobj): """ Return slice `sliceobj` from ECAT data, optimizing if possible diff --git a/nibabel/minc1.py b/nibabel/minc1.py index d276f24074..45f2538415 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -263,9 +263,10 @@ def is_proxy(self): def _get_scaled(self, dtype, slicer): data = self.minc_file.get_scaled_data(slicer) - if dtype is not None and np.dtype(dtype) > data.dtype: - data = data.astype(dtype) - return data + if dtype is None: + return data + final_type = np.promote_types(data.dtype, dtype) + return data.astype(final_type, copy=False) def get_scaled(self, dtype=None): return self._get_scaled(dtype=dtype, slicer=()) diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 8c10fb1ec8..1f6193db13 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -633,12 +633,6 @@ def dtype(self): def is_proxy(self): return True - def get_unscaled(self): - with ImageOpener(self.file_like) as fileobj: - return _data_from_rec(fileobj, self._rec_shape, self._dtype, - self._slice_indices, self._shape, - mmap=self._mmap) - def _get_unscaled(self, slicer): indices = self._slice_indices if slicer == (): @@ -675,6 +669,9 @@ def _get_scaled(self, dtype, slicer): return raw_data * slopes[slicer].astype(final_type) + inters[slicer].astype(final_type) + def get_unscaled(self): + return self._get_unscaled(slicer=()) + def get_scaled(self, dtype=None): return self._get_scaled(dtype=dtype, slicer=()) From bec78b6fca9f6168e6b5a4d8ed2046f9344c1de0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 10 Nov 2019 17:10:18 -0500 Subject: [PATCH 357/689] TEST: Verify get_scaled works sensibly with ints --- nibabel/tests/test_proxy_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index f79f78fef2..b8316d7291 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -140,7 +140,7 @@ def validate_get_scaled(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) - for dtype in np.sctypes['float']: + for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: out = prox.get_scaled(dtype=dtype) assert_almost_equal(out, params['arr_out']) assert_greater_equal(out.dtype, np.dtype(dtype)) From 5b008b609bc18aeeb614d9158504e46539c87d07 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 10 Nov 2019 17:21:32 -0500 Subject: [PATCH 358/689] MNT: Update nitest-dicom pin --- nibabel-data/nitest-dicom | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel-data/nitest-dicom b/nibabel-data/nitest-dicom index ff6844f3a5..2246c92726 160000 --- a/nibabel-data/nitest-dicom +++ b/nibabel-data/nitest-dicom @@ -1 +1 @@ -Subproject commit ff6844f3a5ef79974c5809a79314c98fd81693cf +Subproject commit 2246c9272658693c02810836bdf820c1c6607624 From bebaf9c13edec33a7784372d1e0ccef40f8549a3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 10 Nov 2019 17:34:22 -0500 Subject: [PATCH 359/689] TEST: Basic tests for CT DICOM image --- nibabel/nicom/tests/test_dicomwrappers.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 1ebf464d0e..c78249c381 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -20,7 +20,7 @@ from nose.tools import (assert_true, assert_false, assert_equal, assert_not_equal, assert_raises) -from numpy.testing import assert_array_equal, assert_array_almost_equal +from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data IO_DATA_PATH = pjoin(dirname(__file__), 'data') @@ -39,6 +39,8 @@ DATA_FILE_EMPTY_ST = pjoin(IO_DATA_PATH, 'slicethickness_empty_string.dcm') DATA_FILE_4D_DERIVED = pjoin(get_nibabel_data(), 'nitest-dicom', '4d_multiframe_with_derived.dcm') +DATA_FILE_CT = pjoin(get_nibabel_data(), 'nitest-dicom', + 'siemens_ct_header_csa.dcm') # This affine from our converted image was shown to match our image spatially # with an image from SPM DICOM conversion. 
We checked the matching with SPM @@ -633,6 +635,13 @@ def test_data_derived_shape(self): dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) assert_equal(dw.image_shape, (96, 96, 60, 33)) + @dicom_test + @needs_nibabel_data('nitest-dicom') + def test_data_unreadable_private_headers(self): + # Test CT image with unreadable CSA tags + dw = assert_warns(UserWarning, didw.wrapper_from_file, DATA_FILE_CT) + assert_equal(dw.image_shape, (512, 571)) + @dicom_test def test_data_fake(self): # Test algorithm for get_data From 7b8a1b82339883c3831f153b88e8549ff0e5e1df Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:07:09 -0500 Subject: [PATCH 360/689] moving test_affines to pytest --- nibabel/tests/test_affines.py | 97 +++++++++++++++++------------------ 1 file changed, 48 insertions(+), 49 deletions(-) diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index 13b554c5a8..01121e3874 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -9,10 +9,8 @@ from ..affines import (AffineError, apply_affine, append_diag, to_matvec, from_matvec, dot_reduce, voxel_sizes, obliquity) - -from nose.tools import assert_equal, assert_raises -from numpy.testing import assert_array_equal, assert_almost_equal, \ - assert_array_almost_equal +import pytest +import numpy.testing as npt def validated_apply_affine(T, xyz): @@ -32,28 +30,27 @@ def test_apply_affine(): rng = np.random.RandomState(20110903) aff = np.diag([2, 3, 4, 1]) pts = rng.uniform(size=(4, 3)) - assert_array_equal(apply_affine(aff, pts), - pts * [[2, 3, 4]]) + npt.assert_equal(apply_affine(aff, pts), pts * [[2, 3, 4]]) aff[:3, 3] = [10, 11, 12] - assert_array_equal(apply_affine(aff, pts), - pts * [[2, 3, 4]] + [[10, 11, 12]]) + npt.assert_equal(apply_affine(aff, pts), + pts * [[2, 3, 4]] + [[10, 11, 12]]) aff[:3, :] = rng.normal(size=(3, 4)) exp_res = np.concatenate((pts.T, np.ones((1, 4))), axis=0) exp_res = np.dot(aff, exp_res)[:3, :].T - assert_array_equal(apply_affine(aff, pts), exp_res) + npt.assert_equal(apply_affine(aff, pts), exp_res) # Check we get the same result as the previous implementation - assert_almost_equal(validated_apply_affine(aff, pts), apply_affine(aff, pts)) + npt.assert_almost_equal(validated_apply_affine(aff, pts), apply_affine(aff, pts)) # Check that lists work for inputs - assert_array_equal(apply_affine(aff.tolist(), pts.tolist()), exp_res) + npt.assert_equal(apply_affine(aff.tolist(), pts.tolist()), exp_res) # Check that it's the same as a banal implementation in the simple case aff = np.array([[0, 2, 0, 10], [3, 0, 0, 11], [0, 0, 4, 12], [0, 0, 0, 1]]) pts = np.array([[1, 2, 3], [2, 3, 4], [4, 5, 6], [6, 7, 8]]) exp_res = (np.dot(aff[:3, :3], pts.T) + aff[:3, 3:4]).T - assert_array_equal(apply_affine(aff, pts), exp_res) + npt.assert_equal(apply_affine(aff, pts), exp_res) # That points can be reshaped and you'll get the same shape output pts = pts.reshape((2, 2, 3)) exp_res = exp_res.reshape((2, 2, 3)) - assert_array_equal(apply_affine(aff, pts), exp_res) + npt.assert_equal(apply_affine(aff, pts), exp_res) # That ND also works for N in range(2, 6): aff = np.eye(N) @@ -67,7 +64,7 @@ def test_apply_affine(): exp_pts = np.dot(aff, new_pts) exp_pts = np.rollaxis(exp_pts[:-1, :], 0, 2) exp_res = exp_pts.reshape((2, 3, nd)) - assert_array_almost_equal(res, exp_res) + npt.assert_almost_equal(res, exp_res) def test_matrix_vector(): @@ -78,39 +75,39 @@ def test_matrix_vector(): newmat, newvec = to_matvec(xform) mat = xform[:-1, :-1] vec = xform[:-1, -1] - 
assert_array_equal(newmat, mat) - assert_array_equal(newvec, vec) - assert_equal(newvec.shape, (M - 1,)) - assert_array_equal(from_matvec(mat, vec), xform) + npt.assert_equal(newmat, mat) + npt.assert_equal(newvec, vec) + npt.assert_equal(newvec.shape, (M - 1,)) + npt.assert_equal(from_matvec(mat, vec), xform) # Check default translation works xform_not = xform[:] xform_not[:-1, :] = 0 - assert_array_equal(from_matvec(mat), xform) - assert_array_equal(from_matvec(mat, None), xform) + npt.assert_equal(from_matvec(mat), xform) + npt.assert_equal(from_matvec(mat, None), xform) # Check array-like works newmat, newvec = to_matvec(xform.tolist()) - assert_array_equal(newmat, mat) - assert_array_equal(newvec, vec) - assert_array_equal(from_matvec(mat.tolist(), vec.tolist()), xform) + npt.assert_equal(newmat, mat) + npt.assert_equal(newvec, vec) + npt.assert_equal(from_matvec(mat.tolist(), vec.tolist()), xform) def test_append_diag(): # Routine for appending diagonal elements - assert_array_equal(append_diag(np.diag([2, 3, 1]), [1]), + npt.assert_equal(append_diag(np.diag([2, 3, 1]), [1]), np.diag([2, 3, 1, 1])) - assert_array_equal(append_diag(np.diag([2, 3, 1]), [1, 1]), + npt.assert_equal(append_diag(np.diag([2, 3, 1]), [1, 1]), np.diag([2, 3, 1, 1, 1])) aff = np.array([[2, 0, 0], [0, 3, 0], [0, 0, 1], [0, 0, 1]]) - assert_array_equal(append_diag(aff, [5], [9]), + npt.assert_equal(append_diag(aff, [5], [9]), [[2, 0, 0, 0], [0, 3, 0, 0], [0, 0, 0, 1], [0, 0, 5, 9], [0, 0, 0, 1]]) - assert_array_equal(append_diag(aff, [5, 6], [9, 10]), + npt.assert_equal(append_diag(aff, [5, 6], [9, 10]), [[2, 0, 0, 0, 0], [0, 3, 0, 0, 0], [0, 0, 0, 0, 1], @@ -120,38 +117,40 @@ def test_append_diag(): aff = np.array([[2, 0, 0, 0], [0, 3, 0, 0], [0, 0, 0, 1]]) - assert_array_equal(append_diag(aff, [5], [9]), + npt.assert_equal(append_diag(aff, [5], [9]), [[2, 0, 0, 0, 0], [0, 3, 0, 0, 0], [0, 0, 0, 5, 9], [0, 0, 0, 0, 1]]) # Length of starts has to match length of steps - assert_raises(AffineError, append_diag, aff, [5, 6], [9]) + with pytest.raises(AffineError): + append_diag(aff, [5, 6], [9]) def test_dot_reduce(): # Chaining numpy dot # Error for no arguments - assert_raises(TypeError, dot_reduce) + with pytest.raises(TypeError): + dot_reduce() # Anything at all on its own, passes through - assert_equal(dot_reduce(1), 1) - assert_equal(dot_reduce(None), None) - assert_equal(dot_reduce([1, 2, 3]), [1, 2, 3]) + npt.assert_equal(dot_reduce(1), 1) + npt.assert_equal(dot_reduce(None), None) + npt.assert_equal(dot_reduce([1, 2, 3]), [1, 2, 3]) # Two or more -> dot product vec = [1, 2, 3] mat = np.arange(4, 13).reshape((3, 3)) - assert_array_equal(dot_reduce(vec, mat), np.dot(vec, mat)) - assert_array_equal(dot_reduce(mat, vec), np.dot(mat, vec)) + npt.assert_equal(dot_reduce(vec, mat), np.dot(vec, mat)) + npt.assert_equal(dot_reduce(mat, vec), np.dot(mat, vec)) mat2 = np.arange(13, 22).reshape((3, 3)) - assert_array_equal(dot_reduce(mat2, vec, mat), - np.dot(mat2, np.dot(vec, mat))) - assert_array_equal(dot_reduce(mat, vec, mat2, ), - np.dot(mat, np.dot(vec, mat2))) + npt.assert_equal(dot_reduce(mat2, vec, mat), + np.dot(mat2, np.dot(vec, mat))) + npt.assert_equal(dot_reduce(mat, vec, mat2, ), + np.dot(mat, np.dot(vec, mat2))) def test_voxel_sizes(): affine = np.diag([2, 3, 4, 1]) - assert_almost_equal(voxel_sizes(affine), [2, 3, 4]) + npt.assert_almost_equal(voxel_sizes(affine), [2, 3, 4]) # Some example rotations rotations = [] for x_rot, y_rot, z_rot in product((0, 0.4), (0, 0.6), (0, 0.8)): @@ -160,16 +159,16 @@ 
def test_voxel_sizes(): for n in range(2, 10): vox_sizes = np.arange(n) + 4.1 aff = np.diag(list(vox_sizes) + [1]) - assert_almost_equal(voxel_sizes(aff), vox_sizes) + npt.assert_almost_equal(voxel_sizes(aff), vox_sizes) # Translations make no difference aff[:-1, -1] = np.arange(n) + 10 - assert_almost_equal(voxel_sizes(aff), vox_sizes) + npt.assert_almost_equal(voxel_sizes(aff), vox_sizes) # Does not have to be square new_row = np.vstack((np.zeros(n + 1), aff)) - assert_almost_equal(voxel_sizes(new_row), vox_sizes) + npt.assert_almost_equal(voxel_sizes(new_row), vox_sizes) new_col = np.c_[np.zeros(n + 1), aff] - assert_almost_equal(voxel_sizes(new_col), - [0] + list(vox_sizes)) + npt.assert_almost_equal(voxel_sizes(new_col), + [0] + list(vox_sizes)) if n < 3: continue # Rotations do not change the voxel size @@ -177,7 +176,7 @@ def test_voxel_sizes(): rot_affine = np.eye(n + 1) rot_affine[:3, :3] = rotation full_aff = rot_affine.dot(aff) - assert_almost_equal(voxel_sizes(full_aff), vox_sizes) + npt.assert_almost_equal(voxel_sizes(full_aff), vox_sizes) def test_obliquity(): @@ -187,6 +186,6 @@ def test_obliquity(): aligned[:-1, -1] = [-10, -10, -7] R = from_matvec(euler2mat(x=0.09, y=0.001, z=0.001), [0.0, 0.0, 0.0]) oblique = R.dot(aligned) - assert_almost_equal(obliquity(aligned), [0.0, 0.0, 0.0]) - assert_almost_equal(obliquity(oblique) * 180 / pi, - [0.0810285, 5.1569949, 5.1569376]) + npt.assert_almost_equal(obliquity(aligned), [0.0, 0.0, 0.0]) + npt.assert_almost_equal(obliquity(oblique) * 180 / pi, + [0.0810285, 5.1569949, 5.1569376]) From c99ef1274e36f4525ecb526d56080ec91a6a7ade Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:16:43 -0500 Subject: [PATCH 361/689] adding pytest to travis --- .travis.yml | 1 + setup.cfg | 2 ++ 2 files changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 1b81ba296c..b35a0cb1eb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -132,6 +132,7 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + pytest -v nibabel/tests/test_affines.py else false fi diff --git a/setup.cfg b/setup.cfg index 5630921dd9..3b5bddae19 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,6 +33,7 @@ install_requires = numpy >=1.12 tests_require = nose >=0.11 + pytest mock test_suite = nose.collector zip_safe = False @@ -54,6 +55,7 @@ test = coverage mock nose >=0.11 + pytest all = %(dicom)s %(doc)s From 7b70af7c9623e847b0eef3ffe0d07615306ac2c6 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:32:24 -0500 Subject: [PATCH 362/689] adding pytest to the windows env --- .azure-pipelines/windows.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 0cda80d6ee..5c7aa3d713 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -29,10 +29,9 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% - python -m pip install nose mock coverage codecov displayName: 'Install dependencies' - script: | - python -m pip install . + python -m pip install .[test] SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | mkdir for_testing cd for_testing cp ../.coveragerc .
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + pytest -v nibabel/tests/test_affines.py displayName: 'Nose tests' - script: | cd for_testing From 491020fdac11d4f7b0e3d61ecb52c4d43263b401 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:41:19 -0500 Subject: [PATCH 363/689] testing windows installations --- .azure-pipelines/windows.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 5c7aa3d713..9cfa7a7e83 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -29,9 +29,11 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% + python -m pip install nose mock coverage codecov + python -m pip install pytest displayName: 'Install dependencies' - script: | - python -m pip install .[test] + python -m pip install . SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | From 8868ac99a05ac3614f2329ade7ed616aee41205e Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:48:53 -0500 Subject: [PATCH 364/689] trying a different pytest command for windows --- .azure-pipelines/windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 9cfa7a7e83..a719b1f0bf 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -41,7 +41,7 @@ jobs: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v nibabel/tests/test_affines.py + pytest -v %CD%\\nibabel\tests\test_affines.py displayName: 'Nose tests' - script: | cd for_testing From 86fe29af08cbeb93c24a3264dfa77ff34ddacb17 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 12:53:37 -0500 Subject: [PATCH 365/689] yet another command for windows --- .azure-pipelines/windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index a719b1f0bf..c28a952343 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -41,7 +41,7 @@ jobs: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v %CD%\\nibabel\tests\test_affines.py + pytest -v nibabel\tests\test_affines.py displayName: 'Nose tests' - script: | cd for_testing From eae2b0449dcd76755fda8c5ebcdf0f2b0968adbe Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 13:06:59 -0500 Subject: [PATCH 366/689] fixing path to the test --- .azure-pipelines/windows.yml | 2 +- .travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index c28a952343..826c7ddc41 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -41,7 +41,7 @@ jobs: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v nibabel\tests\test_affines.py + pytest -v ../nibabel/tests/test_affines.py displayName: 'Nose tests' - script: | cd for_testing diff --git a/.travis.yml b/.travis.yml index b35a0cb1eb..39f1a14a45 100644 --- a/.travis.yml +++ b/.travis.yml @@ -132,7 +132,7 @@ script: cd for_testing cp ../.coveragerc . 
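# (Editorial aside, not part of the patch: both CI scripts cd into
# for_testing before running the tests, so the pytest file argument must be
# given relative to that directory; that is why this patch settles on
# ../nibabel/tests/test_affines.py for both windows.yml and .travis.yml.)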
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v nibabel/tests/test_affines.py + pytest -v ../nibabel/tests/test_affines.py else false fi From 6748c6de2385b9613d6f6a752ce7b23a5951a8e7 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 13:23:47 -0500 Subject: [PATCH 367/689] revert some of the changes to the test (didn't notice numpy.testing was already used) --- nibabel/tests/test_affines.py | 89 ++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 43 deletions(-) diff --git a/nibabel/tests/test_affines.py b/nibabel/tests/test_affines.py index 01121e3874..6fd2f59fab 100644 --- a/nibabel/tests/test_affines.py +++ b/nibabel/tests/test_affines.py @@ -9,8 +9,10 @@ from ..affines import (AffineError, apply_affine, append_diag, to_matvec, from_matvec, dot_reduce, voxel_sizes, obliquity) + import pytest -import numpy.testing as npt +from numpy.testing import assert_array_equal, assert_almost_equal, \ + assert_array_almost_equal def validated_apply_affine(T, xyz): @@ -32,27 +32,28 @@ def test_apply_affine(): rng = np.random.RandomState(20110903) aff = np.diag([2, 3, 4, 1]) pts = rng.uniform(size=(4, 3)) - npt.assert_equal(apply_affine(aff, pts), pts * [[2, 3, 4]]) + assert_array_equal(apply_affine(aff, pts), + pts * [[2, 3, 4]]) aff[:3, 3] = [10, 11, 12] - npt.assert_equal(apply_affine(aff, pts), - pts * [[2, 3, 4]] + [[10, 11, 12]]) + assert_array_equal(apply_affine(aff, pts), + pts * [[2, 3, 4]] + [[10, 11, 12]]) aff[:3, :] = rng.normal(size=(3, 4)) exp_res = np.concatenate((pts.T, np.ones((1, 4))), axis=0) exp_res = np.dot(aff, exp_res)[:3, :].T - npt.assert_equal(apply_affine(aff, pts), exp_res) + assert_array_equal(apply_affine(aff, pts), exp_res) # Check we get the same result as the previous implementation - npt.assert_almost_equal(validated_apply_affine(aff, pts), apply_affine(aff, pts)) + assert_almost_equal(validated_apply_affine(aff, pts), apply_affine(aff, pts)) # Check that lists work for inputs - npt.assert_equal(apply_affine(aff.tolist(), pts.tolist()), exp_res) + assert_array_equal(apply_affine(aff.tolist(), pts.tolist()), exp_res) # Check that it's the same as a banal implementation in the simple case aff = np.array([[0, 2, 0, 10], [3, 0, 0, 11], [0, 0, 4, 12], [0, 0, 0, 1]]) pts = np.array([[1, 2, 3], [2, 3, 4], [4, 5, 6], [6, 7, 8]]) exp_res = (np.dot(aff[:3, :3], pts.T) + aff[:3, 3:4]).T - npt.assert_equal(apply_affine(aff, pts), exp_res) + assert_array_equal(apply_affine(aff, pts), exp_res) # That points can be reshaped and you'll get the same shape output pts = pts.reshape((2, 2, 3)) exp_res = exp_res.reshape((2, 2, 3)) - npt.assert_equal(apply_affine(aff, pts), exp_res) + assert_array_equal(apply_affine(aff, pts), exp_res) # That ND also works for N in range(2, 6): aff = np.eye(N) @@ -64,7 +67,7 @@ def test_apply_affine(): exp_pts = np.dot(aff, new_pts) exp_pts = np.rollaxis(exp_pts[:-1, :], 0, 2) exp_res = exp_pts.reshape((2, 3, nd)) - npt.assert_almost_equal(res, exp_res) + assert_array_almost_equal(res, exp_res) def test_matrix_vector(): @@ -75,39 +78,39 @@ def test_matrix_vector(): newmat, newvec = to_matvec(xform) mat = xform[:-1, :-1] vec = xform[:-1, -1] - npt.assert_equal(newmat, mat) - npt.assert_equal(newvec, vec) - npt.assert_equal(newvec.shape, (M - 1,)) - npt.assert_equal(from_matvec(mat, vec), xform) + assert_array_equal(newmat, mat) + assert_array_equal(newvec, vec) + assert newvec.shape == (M - 1,) + assert_array_equal(from_matvec(mat, vec), xform) # Check default translation works
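# (Editorial aside: a hedged, self-contained sketch of the to_matvec /
# from_matvec round trip this test exercises; the values are illustrative.)
import numpy as np
from nibabel.affines import from_matvec, to_matvec

aff = from_matvec(np.diag([2, 3, 4]), [9, 10, 11])  # build a 4x4 homogeneous affine
mat, vec = to_matvec(aff)                           # split it back into parts
assert np.array_equal(mat, np.diag([2, 3, 4]))
assert np.array_equal(vec, [9, 10, 11])
# Omitting the vector defaults the translation to zeros:
assert np.array_equal(from_matvec(np.diag([2, 3, 4]))[:-1, -1], [0, 0, 0])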
xform_not = xform[:] xform_not[:-1, :] = 0 - npt.assert_equal(from_matvec(mat), xform) - npt.assert_equal(from_matvec(mat, None), xform) + assert_array_equal(from_matvec(mat), xform) + assert_array_equal(from_matvec(mat, None), xform) # Check array-like works newmat, newvec = to_matvec(xform.tolist()) - npt.assert_equal(newmat, mat) - npt.assert_equal(newvec, vec) - npt.assert_equal(from_matvec(mat.tolist(), vec.tolist()), xform) + assert_array_equal(newmat, mat) + assert_array_equal(newvec, vec) + assert_array_equal(from_matvec(mat.tolist(), vec.tolist()), xform) def test_append_diag(): # Routine for appending diagonal elements - npt.assert_equal(append_diag(np.diag([2, 3, 1]), [1]), + assert_array_equal(append_diag(np.diag([2, 3, 1]), [1]), np.diag([2, 3, 1, 1])) - npt.assert_equal(append_diag(np.diag([2, 3, 1]), [1, 1]), + assert_array_equal(append_diag(np.diag([2, 3, 1]), [1, 1]), np.diag([2, 3, 1, 1, 1])) aff = np.array([[2, 0, 0], [0, 3, 0], [0, 0, 1], [0, 0, 1]]) - npt.assert_equal(append_diag(aff, [5], [9]), + assert_array_equal(append_diag(aff, [5], [9]), [[2, 0, 0, 0], [0, 3, 0, 0], [0, 0, 0, 1], [0, 0, 5, 9], [0, 0, 0, 1]]) - npt.assert_equal(append_diag(aff, [5, 6], [9, 10]), + assert_array_equal(append_diag(aff, [5, 6], [9, 10]), [[2, 0, 0, 0, 0], [0, 3, 0, 0, 0], [0, 0, 0, 0, 1], @@ -117,7 +120,7 @@ def test_append_diag(): aff = np.array([[2, 0, 0, 0], [0, 3, 0, 0], [0, 0, 0, 1]]) - npt.assert_equal(append_diag(aff, [5], [9]), + assert_array_equal(append_diag(aff, [5], [9]), [[2, 0, 0, 0, 0], [0, 3, 0, 0, 0], [0, 0, 0, 5, 9], @@ -133,24 +136,24 @@ def test_dot_reduce(): with pytest.raises(TypeError): dot_reduce() # Anything at all on its own, passes through - npt.assert_equal(dot_reduce(1), 1) - npt.assert_equal(dot_reduce(None), None) - npt.assert_equal(dot_reduce([1, 2, 3]), [1, 2, 3]) + assert dot_reduce(1) == 1 + assert dot_reduce(None) is None + assert dot_reduce([1, 2, 3]) == [1, 2, 3] # Two or more -> dot product vec = [1, 2, 3] mat = np.arange(4, 13).reshape((3, 3)) - npt.assert_equal(dot_reduce(vec, mat), np.dot(vec, mat)) - npt.assert_equal(dot_reduce(mat, vec), np.dot(mat, vec)) + assert_array_equal(dot_reduce(vec, mat), np.dot(vec, mat)) + assert_array_equal(dot_reduce(mat, vec), np.dot(mat, vec)) mat2 = np.arange(13, 22).reshape((3, 3)) - npt.assert_equal(dot_reduce(mat2, vec, mat), - np.dot(mat2, np.dot(vec, mat))) - npt.assert_equal(dot_reduce(mat, vec, mat2, ), - np.dot(mat, np.dot(vec, mat2))) + assert_array_equal(dot_reduce(mat2, vec, mat), + np.dot(mat2, np.dot(vec, mat))) + assert_array_equal(dot_reduce(mat, vec, mat2, ), + np.dot(mat, np.dot(vec, mat2))) def test_voxel_sizes(): affine = np.diag([2, 3, 4, 1]) - npt.assert_almost_equal(voxel_sizes(affine), [2, 3, 4]) + assert_almost_equal(voxel_sizes(affine), [2, 3, 4]) # Some example rotations rotations = [] for x_rot, y_rot, z_rot in product((0, 0.4), (0, 0.6), (0, 0.8)): @@ -159,16 +162,16 @@ def test_voxel_sizes(): for n in range(2, 10): vox_sizes = np.arange(n) + 4.1 aff = np.diag(list(vox_sizes) + [1]) - npt.assert_almost_equal(voxel_sizes(aff), vox_sizes) + assert_almost_equal(voxel_sizes(aff), vox_sizes) # Translations make no difference aff[:-1, -1] = np.arange(n) + 10 - npt.assert_almost_equal(voxel_sizes(aff), vox_sizes) + assert_almost_equal(voxel_sizes(aff), vox_sizes) # Does not have to be square new_row = np.vstack((np.zeros(n + 1), aff)) - npt.assert_almost_equal(voxel_sizes(new_row), vox_sizes) + assert_almost_equal(voxel_sizes(new_row), vox_sizes) new_col = np.c_[np.zeros(n + 1), aff] - 
npt.assert_almost_equal(voxel_sizes(new_col), - [0] + list(vox_sizes)) + assert_almost_equal(voxel_sizes(new_col), + [0] + list(vox_sizes)) if n < 3: continue # Rotations do not change the voxel size @@ -176,7 +179,7 @@ def test_voxel_sizes(): rot_affine = np.eye(n + 1) rot_affine[:3, :3] = rotation full_aff = rot_affine.dot(aff) - npt.assert_almost_equal(voxel_sizes(full_aff), vox_sizes) + assert_almost_equal(voxel_sizes(full_aff), vox_sizes) def test_obliquity(): @@ -186,6 +189,6 @@ def test_obliquity(): aligned[:-1, -1] = [-10, -10, -7] R = from_matvec(euler2mat(x=0.09, y=0.001, z=0.001), [0.0, 0.0, 0.0]) oblique = R.dot(aligned) - npt.assert_almost_equal(obliquity(aligned), [0.0, 0.0, 0.0]) - npt.assert_almost_equal(obliquity(oblique) * 180 / pi, - [0.0810285, 5.1569949, 5.1569376]) + assert_almost_equal(obliquity(aligned), [0.0, 0.0, 0.0]) + assert_almost_equal(obliquity(oblique) * 180 / pi, + [0.0810285, 5.1569949, 5.1569376]) From 64e1ad3762dcc0de3e17787c0d8b13737794ed8a Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 11 Nov 2019 14:24:59 -0500 Subject: [PATCH 368/689] git --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index df018f0ead..fd686f2781 100644 --- a/.gitignore +++ b/.gitignore @@ -84,3 +84,4 @@ Thumbs.db doc/source/reference venv/ .buildbot.patch +.vscode From ea25f7eea8ef18db29c2f484e93502cfdc1d976c Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 14:45:08 -0500 Subject: [PATCH 369/689] adding testing_pytest that removed nose (it will be used temp. for tests converted to pytest) --- nibabel/testing_pytest/__init__.py | 223 ++++++++++++++++++++++++++ nibabel/testing_pytest/np_features.py | 23 +++ 2 files changed, 246 insertions(+) create mode 100644 nibabel/testing_pytest/__init__.py create mode 100644 nibabel/testing_pytest/np_features.py diff --git a/nibabel/testing_pytest/__init__.py b/nibabel/testing_pytest/__init__.py new file mode 100644 index 0000000000..675c506e3b --- /dev/null +++ b/nibabel/testing_pytest/__init__.py @@ -0,0 +1,223 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the NiBabel package for the +# copyright and license terms. 
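# (Editorial aside, not part of the patch: test modules converted to pytest
# are expected to swap their helper imports over to this new package, e.g.
#     from ..testing import suppress_warnings
# becomes
#     from ..testing_pytest import suppress_warnings
# as the test_volumeutils conversion later in this series does.)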
+# +### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +''' Utilities for testing ''' + +import re +import os +import sys +import warnings +from pkg_resources import resource_filename +from os.path import dirname, abspath, join as pjoin + +import numpy as np +from numpy.testing import assert_array_equal, assert_warns +from numpy.testing import dec +skipif = dec.skipif +slow = dec.slow + +from ..deprecated import deprecate_with_version as _deprecate_with_version + + +from itertools import zip_longest + + +def test_data(subdir=None, fname=None): + if subdir is None: + resource = os.path.join('tests', 'data') + elif subdir in ('gifti', 'nicom', 'externals'): + resource = os.path.join(subdir, 'tests', 'data') + else: + raise ValueError("Unknown test data directory: %s" % subdir) + + if fname is not None: + resource = os.path.join(resource, fname) + + return resource_filename('nibabel', resource) + + +# set path to example data +data_path = test_data() + + +from .np_features import memmap_after_ufunc + +def assert_dt_equal(a, b): + """ Assert two numpy dtype specifiers are equal + + Avoids failed comparison between int32 / int64 and intp + """ + assert np.dtype(a).str == np.dtype(b).str + + +def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): + """ Allclose in integers go all wrong for large integers + """ + a = np.atleast_1d(a) # 0d arrays cannot be indexed + a, b = np.broadcast_arrays(a, b) + if match_nans: + nans = np.isnan(a) + np.testing.assert_array_equal(nans, np.isnan(b)) + to_test = ~nans + else: + to_test = np.ones(a.shape, dtype=bool) + # Deal with float128 inf comparisons (bug in numpy 1.9.2) + # np.allclose(np.float128(np.inf), np.float128(np.inf)) == False + to_test = to_test & (a != b) + a = a[to_test] + b = b[to_test] + if a.dtype.kind in 'ui': + a = a.astype(float) + if b.dtype.kind in 'ui': + b = b.astype(float) + assert np.allclose(a, b, rtol=rtol, atol=atol) + + +def assert_arrays_equal(arrays1, arrays2): + """ Check two iterables yield the same sequence of arrays. """ + for arr1, arr2 in zip_longest(arrays1, arrays2, fillvalue=None): + assert (arr1 is not None and arr2 is not None) + assert_array_equal(arr1, arr2) + + +def assert_re_in(regex, c, flags=0): + """Assert that container (list, str, etc) contains entry matching the regex + """ + if not isinstance(c, (list, tuple)): + c = [c] + for e in c: + if re.match(regex, e, flags=flags): + return + raise AssertionError("Not a single entry matched %r in %r" % (regex, c)) + + +def get_fresh_mod(mod_name=__name__): + # Get this module, with warning registry empty + my_mod = sys.modules[mod_name] + try: + my_mod.__warningregistry__.clear() + except AttributeError: + pass + return my_mod + + +class clear_and_catch_warnings(warnings.catch_warnings): + """ Context manager that resets warning registry for catching warnings + + Warnings can be slippery, because, whenever a warning is triggered, Python + adds a ``__warningregistry__`` member to the *calling* module. This makes + it impossible to retrigger the warning in this module, whatever you put in + the warnings filters. This context manager accepts a sequence of `modules` + as a keyword argument to its constructor and: + + * stores and removes any ``__warningregistry__`` entries in given `modules` + on entry; + * resets ``__warningregistry__`` to its previous state on exit. + + This makes it possible to trigger any warning afresh inside the context + manager without disturbing the state of warnings outside. 
+ + For compatibility with Python 3.0, please consider all arguments to be + keyword-only. + + Parameters + ---------- + record : bool, optional + Specifies whether warnings should be captured by a custom + implementation of ``warnings.showwarning()`` and be appended to a list + returned by the context manager. Otherwise None is returned by the + context manager. The objects appended to the list are arguments whose + attributes mirror the arguments to ``showwarning()``. + + NOTE: nibabel difference from numpy: default is True + + modules : sequence, optional + Sequence of modules for which to reset warnings registry on entry and + restore on exit + + Examples + -------- + >>> import warnings + >>> with clear_and_catch_warnings(modules=[np.core.fromnumeric]): + ... warnings.simplefilter('always') + ... # do something that raises a warning in np.core.fromnumeric + """ + class_modules = () + + def __init__(self, record=True, modules=()): + self.modules = set(modules).union(self.class_modules) + self._warnreg_copies = {} + super(clear_and_catch_warnings, self).__init__(record=record) + + def __enter__(self): + for mod in self.modules: + if hasattr(mod, '__warningregistry__'): + mod_reg = mod.__warningregistry__ + self._warnreg_copies[mod] = mod_reg.copy() + mod_reg.clear() + return super(clear_and_catch_warnings, self).__enter__() + + def __exit__(self, *exc_info): + super(clear_and_catch_warnings, self).__exit__(*exc_info) + for mod in self.modules: + if hasattr(mod, '__warningregistry__'): + mod.__warningregistry__.clear() + if mod in self._warnreg_copies: + mod.__warningregistry__.update(self._warnreg_copies[mod]) + + +class error_warnings(clear_and_catch_warnings): + """ Context manager to check for warnings as errors. Usually used with + ``assert_raises`` in the with block + + Examples + -------- + >>> with error_warnings(): + ... try: + ... warnings.warn('Message', UserWarning) + ... except UserWarning: + ... print('I consider myself warned') + I consider myself warned + """ + filter = 'error' + + def __enter__(self): + mgr = super(error_warnings, self).__enter__() + warnings.simplefilter(self.filter) + return mgr + + +class suppress_warnings(error_warnings): + """ Version of ``catch_warnings`` class that suppresses warnings + """ + filter = 'ignore' + + +@_deprecate_with_version('catch_warn_reset is deprecated; use ' + 'nibabel.testing.clear_and_catch_warnings.', + since='2.1.0', until='3.0.0') +class catch_warn_reset(clear_and_catch_warnings): + pass + + +EXTRA_SET = os.environ.get('NIPY_EXTRA_TESTS', '').split(',') + + +def runif_extra_has(test_str): + """Decorator checks to see if NIPY_EXTRA_TESTS env var contains test_str""" + return skipif(test_str not in EXTRA_SET, + "Skip {0} tests.".format(test_str)) + + +def assert_arr_dict_equal(dict1, dict2): + """ Assert that two dicts are equal, where dicts contain arrays + """ + assert set(dict1) == set(dict2) + for key, value1 in dict1.items(): + value2 = dict2[key] + assert_array_equal(value1, value2) diff --git a/nibabel/testing_pytest/np_features.py b/nibabel/testing_pytest/np_features.py new file mode 100644 index 0000000000..8919542d1c --- /dev/null +++ b/nibabel/testing_pytest/np_features.py @@ -0,0 +1,23 @@ +""" Look for changes in numpy behavior over versions +""" + +import numpy as np + + +def memmap_after_ufunc(): + """ Return True if ufuncs on memmap arrays always return memmap arrays + + This should be True for numpy < 1.12, False otherwise. + + Memoize after first call. 
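# (Editorial aside: a minimal sketch of the function-attribute memoization
# idiom that memmap_after_ufunc uses below; expensive_probe is a
# hypothetical name.)
def expensive_probe():
    # Compute once; afterwards serve the cached value from the attribute
    if expensive_probe.result is None:
        expensive_probe.result = sum(i * i for i in range(1000))
    return expensive_probe.result

expensive_probe.result = None

assert expensive_probe() == expensive_probe()  # second call hits the cache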
We do this to avoid having to call this when + importing nibabel.testing, because we cannot depend on the source file + being present - see gh-571. + """ + if memmap_after_ufunc.result is not None: + return memmap_after_ufunc.result + with open(__file__, 'rb') as fobj: + mm_arr = np.memmap(fobj, mode='r', shape=(10,), dtype=np.uint8) + memmap_after_ufunc.result = isinstance(mm_arr + 1, np.memmap) + return memmap_after_ufunc.result + +memmap_after_ufunc.result = None From 98861a03b78dc33c0fcfc08598d77e5f61edd8de Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 11 Nov 2019 15:32:30 -0500 Subject: [PATCH 370/689] converted test_volumeutils to pytest --- .azure-pipelines/windows.yml | 2 +- .travis.yml | 2 +- nibabel/tests/test_volumeutils.py | 535 +++++++++++++++--------------- 3 files changed, 268 insertions(+), 271 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 826c7ddc41..2d63db68e0 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -41,7 +41,7 @@ jobs: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_affines.py + pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py displayName: 'Nose tests' - script: | cd for_testing diff --git a/.travis.yml b/.travis.yml index 39f1a14a45..ea4eb22291 100644 --- a/.travis.yml +++ b/.travis.yml @@ -132,7 +132,7 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_affines.py + pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py else false fi diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 6eeb6c6e55..12f93ef70d 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -20,6 +20,7 @@ import bz2 import threading import time +import pytest import numpy as np @@ -54,10 +55,8 @@ from numpy.testing import (assert_array_almost_equal, assert_array_equal) -from nose.tools import assert_true, assert_false, assert_equal, assert_raises - -from ..testing import (assert_dt_equal, assert_allclose_safely, - suppress_warnings, clear_and_catch_warnings) +from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, + suppress_warnings, clear_and_catch_warnings) #: convenience variables for numpy types FLOAT_TYPES = np.sctypes['float'] @@ -77,7 +76,7 @@ def test__is_compressed_fobj(): fname = 'test.bin' + ext for mode in ('wb', 'rb'): fobj = opener(fname, mode) - assert_equal(_is_compressed_fobj(fobj), compressed) + assert _is_compressed_fobj(fobj) == compressed fobj.close() @@ -108,20 +107,20 @@ def make_array(n, bytes): contents1 = bytearray(4 * n) fobj_r.readinto(contents1) # Second element is 1 - assert_false(contents1[0:8] == b'\x00' * 8) + assert contents1[0:8] != b'\x00' * 8 out_arr = make_array(n, contents1) assert_array_equal(in_arr, out_arr) # Set second element to 0 out_arr[1] = 0 # Show this changed the bytes string - assert_equal(contents1[:8], b'\x00' * 8) + assert contents1[:8] == b'\x00' * 8 # Reread, to get unmodified contents fobj_r.seek(0) contents2 = bytearray(4 * n) fobj_r.readinto(contents2) out_arr2 = make_array(n, contents2) assert_array_equal(in_arr, out_arr2) - assert_equal(out_arr[1], 0) + assert out_arr[1] == 0 finally: fobj_r.close() os.unlink(fname) @@ -133,30 +132,30 @@ def test_array_from_file(): in_arr = np.arange(24, 
dtype=dtype).reshape(shape) # Check on string buffers offset = 0 - assert_true(buf_chk(in_arr, BytesIO(), None, offset)) + assert buf_chk(in_arr, BytesIO(), None, offset) offset = 10 - assert_true(buf_chk(in_arr, BytesIO(), None, offset)) + assert buf_chk(in_arr, BytesIO(), None, offset) # check on real file fname = 'test.bin' with InTemporaryDirectory(): # fortran ordered out_buf = open(fname, 'wb') in_buf = open(fname, 'rb') - assert_true(buf_chk(in_arr, out_buf, in_buf, offset)) + assert buf_chk(in_arr, out_buf, in_buf, offset) # Drop offset to check that shape's not coming from file length out_buf.seek(0) in_buf.seek(0) offset = 5 - assert_true(buf_chk(in_arr, out_buf, in_buf, offset)) + assert buf_chk(in_arr, out_buf, in_buf, offset) del out_buf, in_buf # Make sure empty shape, and zero length, give empty arrays arr = array_from_file((), np.dtype('f8'), BytesIO()) - assert_equal(len(arr), 0) + assert len(arr) == 0 arr = array_from_file((0,), np.dtype('f8'), BytesIO()) - assert_equal(len(arr), 0) + assert len(arr) == 0 # Check error from small file - assert_raises(IOError, array_from_file, - shape, dtype, BytesIO()) + with pytest.raises(IOError): + array_from_file(shape, dtype, BytesIO()) # check on real file fd, fname = tempfile.mkstemp() with InTemporaryDirectory(): @@ -164,8 +163,8 @@ def test_array_from_file(): in_buf = open(fname, 'rb') # For windows this will raise a WindowsError from mmap, Unices # appear to raise an IOError - assert_raises(Exception, array_from_file, - shape, dtype, in_buf) + with pytest.raises(Exception): + array_from_file(shape, dtype, in_buf) del in_buf @@ -180,35 +179,35 @@ def test_array_from_file_mmap(): with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj) assert_array_equal(res, arr) - assert_true(isinstance(res, np.memmap)) - assert_equal(res.mode, 'c') + assert isinstance(res, np.memmap) + assert res.mode == 'c' with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap=True) assert_array_equal(res, arr) - assert_true(isinstance(res, np.memmap)) - assert_equal(res.mode, 'c') + assert isinstance(res, np.memmap) + assert res.mode == 'c' with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap='c') assert_array_equal(res, arr) - assert_true(isinstance(res, np.memmap)) - assert_equal(res.mode, 'c') + assert isinstance(res, np.memmap) + assert res.mode == 'c' with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap='r') assert_array_equal(res, arr) - assert_true(isinstance(res, np.memmap)) - assert_equal(res.mode, 'r') + assert isinstance(res, np.memmap) + assert res.mode == 'r' with open('test.bin', 'rb+') as fobj: res = array_from_file(shape, dt, fobj, mmap='r+') assert_array_equal(res, arr) - assert_true(isinstance(res, np.memmap)) - assert_equal(res.mode, 'r+') + assert isinstance(res, np.memmap) + assert res.mode == 'r+' with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap=False) assert_array_equal(res, arr) - assert_false(isinstance(res, np.memmap)) + assert not isinstance(res, np.memmap) with open('test.bin', 'rb') as fobj: - assert_raises(ValueError, - array_from_file, shape, dt, fobj, mmap='p') + with pytest.raises(ValueError): + array_from_file(shape, dt, fobj, mmap='p') def buf_chk(in_arr, out_buf, in_buf, offset): @@ -276,7 +275,7 @@ def test_array_from_file_reread(): out_arr = array_from_file(shape, dtt, fobj_r, offset, order) assert_array_equal(in_arr, out_arr) out_arr[..., 0] = -1 - assert_false(np.allclose(in_arr, 
out_arr)) + assert not np.allclose(in_arr, out_arr) out_arr2 = array_from_file(shape, dtt, fobj_r, offset, order) assert_array_equal(in_arr, out_arr2) finally: @@ -336,7 +335,7 @@ def test_a2f_upscale(): back = apply_read_scaling(raw, slope, inter) top = back - arr score = np.abs(top / arr) - assert_true(np.all(score < 10)) + assert np.all(score < 10) def test_a2f_min_max(): @@ -547,13 +546,12 @@ def test_a2f_scaled_unscaled(): nan_fill = np.round(nan_fill) # nan2zero will check whether 0 in scaled to a valid value in output if (in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out): - assert_raises(ValueError, - array_to_file, - arr, - fobj, - out_dtype=out_dtype, - divslope=divslope, - intercept=intercept) + with pytest.raises(ValueError): + array_to_file(arr, + fobj, + out_dtype=out_dtype, + divslope=divslope, + intercept=intercept) continue with suppress_warnings(): back_arr = write_return(arr, fobj, @@ -625,13 +623,12 @@ def test_a2f_bad_scaling(): intercept=inter, divslope=slope)) else: - assert_raises(ValueError, - array_to_file, - arr, - fobj, - np.int8, - intercept=inter, - divslope=slope) + with pytest.raises(ValueError): + array_to_file(arr, + fobj, + np.int8, + intercept=inter, + divslope=slope) def test_a2f_nan2zero_range(): @@ -668,8 +665,10 @@ def test_a2f_nan2zero_range(): # Errors from datatype threshold after scaling back_arr = write_return(arr, fobj, np.int8, intercept=128) assert_array_equal([-128, -128, -127, -128], back_arr) - assert_raises(ValueError, write_return, arr, fobj, np.int8, intercept=129) - assert_raises(ValueError, write_return, arr_no_nan, fobj, np.int8, intercept=129) + with pytest.raises(ValueError): + write_return(arr, fobj, np.int8, intercept=129) + with pytest.raises(ValueError): + write_return(arr_no_nan, fobj, np.int8, intercept=129) # OK with nan2zero false, but we get whatever nan casts to nan_cast = np.array(np.nan).astype(np.int8) back_arr = write_return(arr, fobj, np.int8, intercept=129, nan2zero=False) @@ -677,10 +676,10 @@ def test_a2f_nan2zero_range(): # divslope back_arr = write_return(arr, fobj, np.int8, intercept=256, divslope=2) assert_array_equal([-128, -128, -128, -128], back_arr) - assert_raises(ValueError, write_return, arr, fobj, np.int8, - intercept=257.1, divslope=2) - assert_raises(ValueError, write_return, arr_no_nan, fobj, np.int8, - intercept=257.1, divslope=2) + with pytest.raises(ValueError): + write_return(arr, fobj, np.int8, intercept=257.1, divslope=2) + with pytest.raises(ValueError): + write_return(arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2) # OK with nan2zero false back_arr = write_return(arr, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=False) @@ -705,8 +704,10 @@ def test_a2f_non_numeric(): back_arr = write_return(arr, fobj, float) assert_array_equal(back_arr, arr.astype(float)) # mn, mx never work for structured types - assert_raises(ValueError, write_return, arr, fobj, float, mn=0) - assert_raises(ValueError, write_return, arr, fobj, float, mx=10) + with pytest.raises(ValueError): + write_return(arr, fobj, float, mn=0) + with pytest.raises(ValueError): + write_return(arr, fobj, float, mx=10) def write_return(data, fileobj, out_dtype, *args, **kwargs): @@ -720,42 +721,39 @@ def write_return(data, fileobj, out_dtype, *args, **kwargs): def test_apply_scaling(): # Null scaling, same array returned arr = np.zeros((3,), dtype=np.int16) - assert_true(apply_read_scaling(arr) is arr) - assert_true(apply_read_scaling(arr, np.float64(1.0)) is arr) - assert_true(apply_read_scaling(arr, 
inter=np.float64(0)) is arr) + assert apply_read_scaling(arr) is arr + assert apply_read_scaling(arr, np.float64(1.0)) is arr + assert apply_read_scaling(arr, inter=np.float64(0)) is arr f32, f64 = np.float32, np.float64 f32_arr = np.zeros((1,), dtype=f32) i16_arr = np.zeros((1,), dtype=np.int16) # Check float upcast (not the normal numpy scalar rule) # This is the normal rule - no upcast from scalar - assert_equal((f32_arr * f64(1)).dtype, np.float32) - assert_equal((f32_arr + f64(1)).dtype, np.float32) + assert (f32_arr * f64(1)).dtype == np.float32 + assert (f32_arr + f64(1)).dtype == np.float32 # The function does upcast though - ret = apply_read_scaling(np.float32(0), np.float64(2)) - assert_equal(ret.dtype, np.float64) - ret = apply_read_scaling(np.float32(0), inter=np.float64(2)) - assert_equal(ret.dtype, np.float64) + ret=apply_read_scaling(np.float32(0), np.float64(2)) + assert ret.dtype == np.float64 + ret=apply_read_scaling(np.float32(0), inter=np.float64(2)) + assert ret.dtype == np.float64 # Check integer inf upcast - big = f32(type_info(f32)['max']) + big=f32(type_info(f32)['max']) # Normally this would not upcast - assert_equal((i16_arr * big).dtype, np.float32) + assert (i16_arr * big).dtype == np.float32 # An equivalent case is a little hard to find for the intercept - nmant_32 = type_info(np.float32)['nmant'] - big_delta = np.float32(2**(floor_log2(big) - nmant_32)) - assert_equal((i16_arr * big_delta + big).dtype, np.float32) + nmant_32=type_info(np.float32)['nmant'] + big_delta=np.float32(2**(floor_log2(big) - nmant_32)) + assert (i16_arr * big_delta + big).dtype == np.float32 # Upcasting does occur with this routine - assert_equal(apply_read_scaling(i16_arr, big).dtype, np.float64) - assert_equal(apply_read_scaling(i16_arr, big_delta, big).dtype, np.float64) + assert apply_read_scaling(i16_arr, big).dtype == np.float64 + assert apply_read_scaling(i16_arr, big_delta, big).dtype == np.float64 # If float32 passed, no overflow, float32 returned - assert_equal(apply_read_scaling(np.int8(0), f32(-1.0), f32(0.0)).dtype, - np.float32) + assert apply_read_scaling(np.int8(0), f32(-1.0), f32(0.0)).dtype == np.float32 # float64 passed, float64 returned - assert_equal(apply_read_scaling(np.int8(0), -1.0, 0.0).dtype, np.float64) + assert apply_read_scaling(np.int8(0), -1.0, 0.0).dtype == np.float64 # float32 passed, overflow, float64 returned - assert_equal(apply_read_scaling(np.int8(0), f32(1e38), f32(0.0)).dtype, - np.float64) - assert_equal(apply_read_scaling(np.int8(0), f32(-1e38), f32(0.0)).dtype, - np.float64) + assert apply_read_scaling(np.int8(0), f32(1e38), f32(0.0)).dtype == np.float64 + assert apply_read_scaling(np.int8(0), f32(-1e38), f32(0.0)).dtype == np.float64 # Non-zero intercept still generates floats assert_dt_equal(apply_read_scaling(i16_arr, 1.0, 1.0).dtype, float) assert_dt_equal(apply_read_scaling( @@ -766,7 +764,7 @@ def test_apply_scaling(): def test_apply_read_scaling_ints(): # Test that apply_read_scaling copes with integer scaling inputs - arr = np.arange(10, dtype=np.int16) + arr=np.arange(10, dtype=np.int16) assert_array_equal(apply_read_scaling(arr, 1, 0), arr) assert_array_equal(apply_read_scaling(arr, 1, 1), arr + 1) assert_array_equal(apply_read_scaling(arr, 2, 1), arr * 2 + 1) @@ -774,7 +772,7 @@ def test_apply_read_scaling_ints(): def test_apply_read_scaling_nones(): # Check that we can pass None as slope and inter to apply read scaling - arr = np.arange(10, dtype=np.int16) + arr=np.arange(10, dtype=np.int16) 
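# (Editorial aside: a hedged restatement of the upcast rule tested above.
# apply_read_scaling widens the working type so scale factors apply without
# loss, instead of following numpy's no-upcast-from-scalars rule; the proxy
# patches earlier in this series standardize the same never-downcast
# behaviour via np.promote_types. Assertions mirror the surrounding test.)
import numpy as np
from nibabel.volumeutils import apply_read_scaling

assert apply_read_scaling(np.int8(0), -1.0, 0.0).dtype == np.float64
assert apply_read_scaling(np.int8(0), np.float32(-1.0), np.float32(0.0)).dtype == np.float32
assert apply_read_scaling(np.int8(0), np.float32(1e38), np.float32(0.0)).dtype == np.float64
assert np.promote_types(np.float64, np.float32) == np.float64  # never downcast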
assert_array_equal(apply_read_scaling(arr, None, None), arr) assert_array_equal(apply_read_scaling(arr, 2, None), arr * 2) assert_array_equal(apply_read_scaling(arr, None, 1), arr + 1) @@ -782,10 +780,10 @@ def test_apply_read_scaling_nones(): def test_int_scinter(): # Finding float type needed for applying scale, offset to ints - assert_equal(int_scinter_ftype(np.int8, 1.0, 0.0), np.float32) - assert_equal(int_scinter_ftype(np.int8, -1.0, 0.0), np.float32) - assert_equal(int_scinter_ftype(np.int8, 1e38, 0.0), np.float64) - assert_equal(int_scinter_ftype(np.int8, -1e38, 0.0), np.float64) + assert int_scinter_ftype(np.int8, 1.0, 0.0) == np.float32 + assert int_scinter_ftype(np.int8, -1.0, 0.0) == np.float32 + assert int_scinter_ftype(np.int8, 1e38, 0.0) == np.float64 + assert int_scinter_ftype(np.int8, -1e38, 0.0) == np.float64 def test_working_type(): @@ -794,32 +792,32 @@ def test_working_type(): # need this because of the very confusing np.int32 != np.intp (on 32 bit). def wt(*args, **kwargs): return np.dtype(working_type(*args, **kwargs)).str - d1 = np.atleast_1d + d1=np.atleast_1d for in_type in NUMERIC_TYPES: - in_ts = np.dtype(in_type).str - assert_equal(wt(in_type), in_ts) - assert_equal(wt(in_type, 1, 0), in_ts) - assert_equal(wt(in_type, 1.0, 0.0), in_ts) - in_val = d1(in_type(0)) + in_ts=np.dtype(in_type).str + assert wt(in_type) == in_ts + assert wt(in_type, 1, 0) == in_ts + assert wt(in_type, 1.0, 0.0) == in_ts + in_val=d1(in_type(0)) for slope_type in NUMERIC_TYPES: - sl_val = slope_type(1) # no scaling, regardless of type - assert_equal(wt(in_type, sl_val, 0.0), in_ts) - sl_val = slope_type(2) # actual scaling - out_val = in_val / d1(sl_val) - assert_equal(wt(in_type, sl_val), out_val.dtype.str) + sl_val=slope_type(1) # no scaling, regardless of type + assert wt(in_type, sl_val, 0.0) == in_ts + sl_val=slope_type(2) # actual scaling + out_val=in_val / d1(sl_val) + assert wt(in_type, sl_val) == out_val.dtype.str for inter_type in NUMERIC_TYPES: - i_val = inter_type(0) # no scaling, regardless of type - assert_equal(wt(in_type, 1, i_val), in_ts) - i_val = inter_type(1) # actual scaling - out_val = in_val - d1(i_val) - assert_equal(wt(in_type, 1, i_val), out_val.dtype.str) + i_val=inter_type(0) # no scaling, regardless of type + assert wt(in_type, 1, i_val) == in_ts + i_val=inter_type(1) # actual scaling + out_val=in_val - d1(i_val) + assert wt(in_type, 1, i_val) == out_val.dtype.str # Combine scaling and intercept - out_val = (in_val - d1(i_val)) / d1(sl_val) - assert_equal(wt(in_type, sl_val, i_val), out_val.dtype.str) + out_val=(in_val - d1(i_val)) / d1(sl_val) + assert wt(in_type, sl_val, i_val) == out_val.dtype.str # Confirm that type codes and dtypes work as well - f32s = np.dtype(np.float32).str - assert_equal(wt('f4', 1, 0), f32s) - assert_equal(wt(np.dtype('f4'), 1, 0), f32s) + f32s=np.dtype(np.float32).str + assert wt('f4', 1, 0) == f32s + assert wt(np.dtype('f4'), 1, 0) == f32s def test_better_float(): @@ -828,18 +826,16 @@ def check_against(f1, f2): return f1 if FLOAT_TYPES.index(f1) >= FLOAT_TYPES.index(f2) else f2 for first in FLOAT_TYPES: for other in IUINT_TYPES + np.sctypes['complex']: - assert_equal(better_float_of(first, other), first) - assert_equal(better_float_of(other, first), first) + assert better_float_of(first, other) == first + assert better_float_of(other, first) == first for other2 in IUINT_TYPES + np.sctypes['complex']: - assert_equal(better_float_of(other, other2), np.float32) - assert_equal(better_float_of(other, other2, np.float64), - np.float64) 
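# (Editorial aside: better_float_of in brief, mirroring the test around this
# point: return whichever argument is the more capable float type; when
# neither argument is a float, fall back to the default, itself float32 by
# default.)
import numpy as np
from nibabel.volumeutils import better_float_of

assert better_float_of(np.float64, np.int16) == np.float64  # keep the float
assert better_float_of(np.int16, np.int32) == np.float32    # neither is float: default
assert better_float_of('i4', 'i8', 'f8') == np.float64      # explicit default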
+ assert better_float_of(other, other2) == np.float32 + assert better_float_of(other, other2, np.float64) == np.float64 for second in FLOAT_TYPES: - assert_equal(better_float_of(first, second), - check_against(first, second)) + assert better_float_of(first, second) == check_against(first, second) # Check codes and dtypes work - assert_equal(better_float_of('f4', 'f8', 'f4'), np.float64) - assert_equal(better_float_of('i4', 'i8', 'f8'), np.float64) + assert better_float_of('f4', 'f8', 'f4') == np.float64 + assert better_float_of('i4', 'i8', 'f8') == np.float64 def test_best_write_scale_ftype(): @@ -847,44 +843,40 @@ def test_best_write_scale_ftype(): # Types return better of (default, array type) unless scale overflows. # Return float type cannot be less capable than the input array type for dtt in IUINT_TYPES + FLOAT_TYPES: - arr = np.arange(10, dtype=dtt) - assert_equal(best_write_scale_ftype(arr, 1, 0), - better_float_of(dtt, np.float32)) - assert_equal(best_write_scale_ftype(arr, 1, 0, np.float64), - better_float_of(dtt, np.float64)) - assert_equal(best_write_scale_ftype(arr, np.float32(2), 0), - better_float_of(dtt, np.float32)) - assert_equal(best_write_scale_ftype(arr, 1, np.float32(1)), - better_float_of(dtt, np.float32)) + arr=np.arange(10, dtype=dtt) + assert best_write_scale_ftype(arr, 1, 0) == better_float_of(dtt, np.float32) + assert best_write_scale_ftype(arr, 1, 0, np.float64) == better_float_of(dtt, np.float64) + assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of(dtt, np.float32) + assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of(dtt, np.float32) # Overflowing ints with scaling results in upcast - best_vals = ((np.float32, np.float64),) + best_vals=((np.float32, np.float64),) if np.longdouble in OK_FLOATS: best_vals += ((np.float64, np.longdouble),) for lower_t, higher_t in best_vals: # Information on this float - L_info = type_info(lower_t) - t_max = L_info['max'] - nmant = L_info['nmant'] # number of significand digits - big_delta = lower_t(2**(floor_log2(t_max) - nmant)) # delta below max + L_info=type_info(lower_t) + t_max=L_info['max'] + nmant=L_info['nmant'] # number of significand digits + big_delta=lower_t(2**(floor_log2(t_max) - nmant)) # delta below max # Even large values that don't overflow don't change output - arr = np.array([0, t_max], dtype=lower_t) - assert_equal(best_write_scale_ftype(arr, 1, 0), lower_t) + arr=np.array([0, t_max], dtype=lower_t) + assert best_write_scale_ftype(arr, 1, 0) == lower_t # Scaling > 1 reduces output values, so no upcast needed - assert_equal(best_write_scale_ftype(arr, lower_t(1.01), 0), lower_t) + assert best_write_scale_ftype(arr, lower_t(1.01), 0) == lower_t # Scaling < 1 increases values, so upcast may be needed (and is here) - assert_equal(best_write_scale_ftype(arr, lower_t(0.99), 0), higher_t) + assert best_write_scale_ftype(arr, lower_t(0.99), 0) == higher_t # Large minus offset on large array can cause upcast - assert_equal(best_write_scale_ftype(arr, 1, -big_delta / 2.01), lower_t) - assert_equal(best_write_scale_ftype(arr, 1, -big_delta / 2.0), higher_t) + assert best_write_scale_ftype(arr, 1, -big_delta / 2.01) == lower_t + assert best_write_scale_ftype(arr, 1, -big_delta / 2.0) == higher_t # With infs already in input, default type returns - arr[0] = np.inf - assert_equal(best_write_scale_ftype(arr, lower_t(0.5), 0), lower_t) - arr[0] = -np.inf - assert_equal(best_write_scale_ftype(arr, lower_t(0.5), 0), lower_t) + arr[0]=np.inf + assert best_write_scale_ftype(arr, 
lower_t(0.5), 0) == lower_t + arr[0]=-np.inf + assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t def test_can_cast(): - tests = ((np.float32, np.float32, True, True, True), + tests=((np.float32, np.float32, True, True, True), (np.float64, np.float32, True, True, True), (np.complex128, np.float32, False, False, False), (np.float32, np.complex128, True, True, True), @@ -900,46 +892,46 @@ def test_can_cast(): (np.uint16, np.uint8, False, True, True), ) for intype, outtype, def_res, scale_res, all_res in tests: - assert_equal(def_res, can_cast(intype, outtype)) - assert_equal(scale_res, can_cast(intype, outtype, False, True)) - assert_equal(all_res, can_cast(intype, outtype, True, True)) + assert def_res == can_cast(intype, outtype) + assert scale_res == can_cast(intype, outtype, False, True) + assert all_res == can_cast(intype, outtype, True, True) def test_write_zeros(): - bio = BytesIO() + bio=BytesIO() write_zeros(bio, 10000) - assert_equal(bio.getvalue(), b'\x00' * 10000) + assert bio.getvalue() == b'\x00' * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 10000, 256) - assert_equal(bio.getvalue(), b'\x00' * 10000) + assert bio.getvalue() == b'\x00' * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 200, 256) - assert_equal(bio.getvalue(), b'\x00' * 200) + assert bio.getvalue() == b'\x00' * 200 def test_seek_tell(): # Test seek tell routine - bio = BytesIO() - in_files = bio, 'test.bin', 'test.gz', 'test.bz2' - start = 10 - end = 100 - diff = end - start - tail = 7 + bio=BytesIO() + in_files=bio, 'test.bin', 'test.gz', 'test.bz2' + start=10 + end=100 + diff=end - start + tail=7 with InTemporaryDirectory(): for in_file, write0 in itertools.product(in_files, (False, True)): - st = functools.partial(seek_tell, write0=write0) + st=functools.partial(seek_tell, write0=write0) bio.seek(0) # First write the file with ImageOpener(in_file, 'wb') as fobj: - assert_equal(fobj.tell(), 0) + assert fobj.tell() == 0 # already at position - OK st(fobj, 0) - assert_equal(fobj.tell(), 0) + assert fobj.tell() == 0 # Move position by writing fobj.write(b'\x01' * start) - assert_equal(fobj.tell(), start) + assert fobj.tell() == start # Files other than BZ2Files can seek forward on write, leaving # zeros in their wake. 
BZ2Files can't seek when writing, unless # we enable the write0 flag to seek_tell @@ -948,66 +940,68 @@ def test_seek_tell(): fobj.write(b'\x00' * diff) else: st(fobj, end) - assert_equal(fobj.tell(), end) + assert fobj.tell() == end # Write tail fobj.write(b'\x02' * tail) bio.seek(0) # Now read back the file testing seek_tell in reading mode with ImageOpener(in_file, 'rb') as fobj: - assert_equal(fobj.tell(), 0) + assert fobj.tell() == 0 st(fobj, 0) - assert_equal(fobj.tell(), 0) + assert fobj.tell() == 0 st(fobj, start) - assert_equal(fobj.tell(), start) + assert fobj.tell() == start st(fobj, end) - assert_equal(fobj.tell(), end) + assert fobj.tell() == end # Seek anywhere works in read mode for all files st(fobj, 0) bio.seek(0) # Check we have the expected written output with ImageOpener(in_file, 'rb') as fobj: - assert_equal(fobj.read(), - b'\x01' * start + b'\x00' * diff + b'\x02' * tail) + assert fobj.read() == b'\x01' * start + b'\x00' * diff + b'\x02' * tail for in_file in ('test2.gz', 'test2.bz2'): # Check failure of write seek backwards with ImageOpener(in_file, 'wb') as fobj: fobj.write(b'g' * 10) - assert_equal(fobj.tell(), 10) + assert fobj.tell() == 10 seek_tell(fobj, 10) - assert_equal(fobj.tell(), 10) - assert_raises(IOError, seek_tell, fobj, 5) + assert fobj.tell() == 10 + with pytest.raises(IOError): + seek_tell(fobj, 5) # Make sure read seeks don't affect file with ImageOpener(in_file, 'rb') as fobj: seek_tell(fobj, 10) seek_tell(fobj, 0) with ImageOpener(in_file, 'rb') as fobj: - assert_equal(fobj.read(), b'g' * 10) + assert fobj.read() == b'g' * 10 def test_seek_tell_logic(): # Test logic of seek_tell write0 with dummy class # Seek works? OK - bio = BytesIO() + bio=BytesIO() seek_tell(bio, 10) - assert_equal(bio.tell(), 10) + assert bio.tell() == 10 class BabyBio(BytesIO): def seek(self, *args): raise IOError() - bio = BabyBio() + bio=BabyBio() # Fresh fileobj, position 0, can't seek - error - assert_raises(IOError, bio.seek, 10) + with pytest.raises(IOError): + bio.seek(10) # Put fileobj in correct position by writing - ZEROB = b'\x00' + ZEROB=b'\x00' bio.write(ZEROB * 10) seek_tell(bio, 10) # already there, nothing to do - assert_equal(bio.tell(), 10) - assert_equal(bio.getvalue(), ZEROB * 10) + assert bio.tell() == 10 + assert bio.getvalue() == ZEROB * 10 # Try write zeros to get to new position - assert_raises(IOError, bio.seek, 20) + with pytest.raises(IOError): + bio.seek(20) seek_tell(bio, 20, write0=True) - assert_equal(bio.getvalue(), ZEROB * 20) + assert bio.getvalue() == ZEROB * 20 def test_fname_ext_ul_case(): @@ -1016,23 +1010,23 @@ def test_fname_ext_ul_case(): with open('afile.TXT', 'wt') as fobj: fobj.write('Interesting information') # OSX usually has case-insensitive file systems; Windows also - os_cares_case = not exists('afile.txt') + os_cares_case=not exists('afile.txt') with open('bfile.txt', 'wt') as fobj: fobj.write('More interesting information') # If there is no file, the case doesn't change - assert_equal(fname_ext_ul_case('nofile.txt'), 'nofile.txt') - assert_equal(fname_ext_ul_case('nofile.TXT'), 'nofile.TXT') + assert fname_ext_ul_case('nofile.txt') == 'nofile.txt' + assert fname_ext_ul_case('nofile.TXT') == 'nofile.TXT' # If there is a file, accept upper or lower case for ext if os_cares_case: - assert_equal(fname_ext_ul_case('afile.txt'), 'afile.TXT') - assert_equal(fname_ext_ul_case('bfile.TXT'), 'bfile.txt') + assert fname_ext_ul_case('afile.txt') == 'afile.TXT' + assert fname_ext_ul_case('bfile.TXT') == 'bfile.txt' else: - 
assert_equal(fname_ext_ul_case('afile.txt'), 'afile.txt') - assert_equal(fname_ext_ul_case('bfile.TXT'), 'bfile.TXT') - assert_equal(fname_ext_ul_case('afile.TXT'), 'afile.TXT') - assert_equal(fname_ext_ul_case('bfile.txt'), 'bfile.txt') + assert fname_ext_ul_case('afile.txt') == 'afile.txt' + assert fname_ext_ul_case('bfile.TXT') == 'bfile.TXT' + assert fname_ext_ul_case('afile.TXT') == 'afile.TXT' + assert fname_ext_ul_case('bfile.txt') == 'bfile.txt' # Not mixed case though - assert_equal(fname_ext_ul_case('afile.TxT'), 'afile.TxT') + assert fname_ext_ul_case('afile.TxT') == 'afile.TxT' def test_allopen(): @@ -1041,72 +1035,72 @@ def test_allopen(): with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) # Test default mode is 'rb' - fobj = allopen(__file__) + fobj=allopen(__file__) # Check we got the deprecation warning - assert_equal(len(w), 1) - assert_equal(fobj.mode, 'rb') + assert len(w) == 1 + assert fobj.mode == 'rb' # That we can set it - fobj = allopen(__file__, 'r') - assert_equal(fobj.mode, 'r') + fobj=allopen(__file__, 'r') + assert fobj.mode == 'r' # with keyword arguments - fobj = allopen(__file__, mode='r') - assert_equal(fobj.mode, 'r') + fobj=allopen(__file__, mode='r') + assert fobj.mode == 'r' # fileobj returns fileobj - msg = b'tiddle pom' - sobj = BytesIO(msg) - fobj = allopen(sobj) - assert_equal(fobj.read(), msg) + msg=b'tiddle pom' + sobj=BytesIO(msg) + fobj=allopen(sobj) + assert fobj.read() == msg # mode is gently ignored - fobj = allopen(sobj, mode='r') + fobj=allopen(sobj, mode='r') def test_allopen_compresslevel(): # We can set the default compression level with the module global # Get some data to compress with open(__file__, 'rb') as fobj: - my_self = fobj.read() + my_self=fobj.read() # Prepare loop - fname = 'test.gz' - sizes = {} + fname='test.gz' + sizes={} # Stash module global from .. 
import volumeutils as vu - original_compress_level = vu.default_compresslevel - assert_equal(original_compress_level, 1) + original_compress_level=vu.default_compresslevel + assert original_compress_level == 1 try: with InTemporaryDirectory(): for compresslevel in ('default', 1, 9): if compresslevel != 'default': - vu.default_compresslevel = compresslevel + vu.default_compresslevel=compresslevel with warnings.catch_warnings(): warnings.simplefilter("ignore") with allopen(fname, 'wb') as fobj: fobj.write(my_self) with open(fname, 'rb') as fobj: - my_selves_smaller = fobj.read() - sizes[compresslevel] = len(my_selves_smaller) - assert_equal(sizes['default'], sizes[1]) - assert_true(sizes[1] > sizes[9]) + my_selves_smaller=fobj.read() + sizes[compresslevel]=len(my_selves_smaller) + assert sizes['default'] == sizes[1] + assert sizes[1] > sizes[9] finally: - vu.default_compresslevel = original_compress_level + vu.default_compresslevel=original_compress_level def test_shape_zoom_affine(): - shape = (3, 5, 7) - zooms = (3, 2, 1) - res = shape_zoom_affine(shape, zooms) - exp = np.array([[-3., 0., 0., 3.], + shape=(3, 5, 7) + zooms=(3, 2, 1) + res=shape_zoom_affine(shape, zooms) + exp=np.array([[-3., 0., 0., 3.], [0., 2., 0., -4.], [0., 0., 1., -3.], [0., 0., 0., 1.]]) assert_array_almost_equal(res, exp) - res = shape_zoom_affine((3, 5), (3, 2)) - exp = np.array([[-3., 0., 0., 3.], + res=shape_zoom_affine((3, 5), (3, 2)) + exp=np.array([[-3., 0., 0., 3.], [0., 2., 0., -4.], [0., 0., 1., -0.], [0., 0., 0., 1.]]) assert_array_almost_equal(res, exp) - res = shape_zoom_affine(shape, zooms, False) - exp = np.array([[3., 0., 0., -3.], + res=shape_zoom_affine(shape, zooms, False) + exp=np.array([[3., 0., 0., -3.], [0., 2., 0., -4.], [0., 0., 1., -3.], [0., 0., 0., 1.]]) @@ -1114,9 +1108,9 @@ def test_shape_zoom_affine(): def test_rec2dict(): - r = np.zeros((), dtype=[('x', 'i4'), ('s', 'S10')]) - d = rec2dict(r) - assert_equal(d, {'x': 0, 's': b''}) + r=np.zeros((), dtype=[('x', 'i4'), ('s', 'S10')]) + d=rec2dict(r) + assert d == {'x': 0, 's': b''} def test_dtypes(): @@ -1127,43 +1121,45 @@ def test_dtypes(): # In [10]: dtype(' Date: Mon, 11 Nov 2019 15:50:39 -0500 Subject: [PATCH 371/689] fixed spacing --- nibabel/tests/test_volumeutils.py | 872 ++++++++++++++++-------------- 1 file changed, 466 insertions(+), 406 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 12f93ef70d..34c2196b93 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test for volumeutils module ''' +""" Test for volumeutils module """ import os from os.path import exists @@ -27,54 +27,60 @@ from ..tmpdirs import InTemporaryDirectory from ..openers import ImageOpener from .. 
import volumeutils -from ..volumeutils import (array_from_file, - _is_compressed_fobj, - array_to_file, - allopen, # for backwards compatibility - fname_ext_ul_case, - calculate_scale, - can_cast, - write_zeros, - seek_tell, - apply_read_scaling, - working_type, - best_write_scale_ftype, - better_float_of, - int_scinter_ftype, - make_dt_codes, - native_code, - shape_zoom_affine, - rec2dict, - _dt_min_max, - _write_data, - _ftype4scaled_finite, - ) +from ..volumeutils import ( + array_from_file, + _is_compressed_fobj, + array_to_file, + allopen, # for backwards compatibility + fname_ext_ul_case, + calculate_scale, + can_cast, + write_zeros, + seek_tell, + apply_read_scaling, + working_type, + best_write_scale_ftype, + better_float_of, + int_scinter_ftype, + make_dt_codes, + native_code, + shape_zoom_affine, + rec2dict, + _dt_min_max, + _write_data, + _ftype4scaled_finite, +) from ..openers import Opener, BZ2File -from ..casting import (floor_log2, type_info, OK_FLOATS, shared_range) +from ..casting import floor_log2, type_info, OK_FLOATS, shared_range -from numpy.testing import (assert_array_almost_equal, - assert_array_equal) +from numpy.testing import assert_array_almost_equal, assert_array_equal -from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, - suppress_warnings, clear_and_catch_warnings) +from ..testing_pytest import ( + assert_dt_equal, + assert_allclose_safely, + suppress_warnings, + clear_and_catch_warnings, +) #: convenience variables for numpy types -FLOAT_TYPES = np.sctypes['float'] -COMPLEX_TYPES = np.sctypes['complex'] +FLOAT_TYPES = np.sctypes["float"] +COMPLEX_TYPES = np.sctypes["complex"] CFLOAT_TYPES = FLOAT_TYPES + COMPLEX_TYPES -INT_TYPES = np.sctypes['int'] -IUINT_TYPES = INT_TYPES + np.sctypes['uint'] +INT_TYPES = np.sctypes["int"] +IUINT_TYPES = INT_TYPES + np.sctypes["uint"] NUMERIC_TYPES = CFLOAT_TYPES + IUINT_TYPES def test__is_compressed_fobj(): # _is_compressed helper function with InTemporaryDirectory(): - for ext, opener, compressed in (('', open, False), - ('.gz', gzip.open, True), - ('.bz2', BZ2File, True)): - fname = 'test.bin' + ext - for mode in ('wb', 'rb'): + for ext, opener, compressed in ( + ("", open, False), + (".gz", gzip.open, True), + (".bz2", BZ2File, True), + ): + fname = "test.bin" + ext + for mode in ("wb", "rb"): fobj = opener(fname, mode) assert _is_compressed_fobj(fobj) == compressed fobj.close() @@ -91,29 +97,29 @@ def make_array(n, bytes): return arr # Check whether file, gzip file, bz2 file reread memory from cache - fname = 'test.bin' + fname = "test.bin" with InTemporaryDirectory(): for n, opener in itertools.product( - (256, 1024, 2560, 25600), - (open, gzip.open, BZ2File)): + (256, 1024, 2560, 25600), (open, gzip.open, BZ2File) + ): in_arr = np.arange(n, dtype=dtype) # Write array to file - fobj_w = opener(fname, 'wb') + fobj_w = opener(fname, "wb") fobj_w.write(in_arr.tostring()) fobj_w.close() # Read back from file - fobj_r = opener(fname, 'rb') + fobj_r = opener(fname, "rb") try: contents1 = bytearray(4 * n) fobj_r.readinto(contents1) # Second element is 1 - assert contents1[0:8] != b'\x00' * 8 + assert contents1[0:8] != b"\x00" * 8 out_arr = make_array(n, contents1) assert_array_equal(in_arr, out_arr) # Set second element to 0 out_arr[1] = 0 # Show this changed the bytes string - assert contents1[:8] == b'\x00' * 8 + assert contents1[:8] == b"\x00" * 8 # Reread, to get unmodified contents fobj_r.seek(0) contents2 = bytearray(4 * n) @@ -136,11 +142,11 @@ def test_array_from_file(): offset = 10 assert 
buf_chk(in_arr, BytesIO(), None, offset) # check on real file - fname = 'test.bin' + fname = "test.bin" with InTemporaryDirectory(): # fortran ordered - out_buf = open(fname, 'wb') - in_buf = open(fname, 'rb') + out_buf = open(fname, "wb") + in_buf = open(fname, "rb") assert buf_chk(in_arr, out_buf, in_buf, offset) # Drop offset to check that shape's not coming from file length out_buf.seek(0) @@ -149,9 +155,9 @@ def test_array_from_file(): assert buf_chk(in_arr, out_buf, in_buf, offset) del out_buf, in_buf # Make sure empty shape, and zero length, give empty arrays - arr = array_from_file((), np.dtype('f8'), BytesIO()) + arr = array_from_file((), np.dtype("f8"), BytesIO()) assert len(arr) == 0 - arr = array_from_file((0,), np.dtype('f8'), BytesIO()) + arr = array_from_file((0,), np.dtype("f8"), BytesIO()) assert len(arr) == 0 # Check error from small file with pytest.raises(IOError): @@ -159,8 +165,8 @@ def test_array_from_file(): # check on real file fd, fname = tempfile.mkstemp() with InTemporaryDirectory(): - open(fname, 'wb').write(b'1') - in_buf = open(fname, 'rb') + open(fname, "wb").write(b"1") + in_buf = open(fname, "rb") # For windows this will raise a WindowsError from mmap, Unices # appear to raise an IOError with pytest.raises(Exception): @@ -174,55 +180,51 @@ def test_array_from_file_mmap(): with InTemporaryDirectory(): for dt in (np.int16, np.float): arr = np.arange(np.prod(shape), dtype=dt).reshape(shape) - with open('test.bin', 'wb') as fobj: - fobj.write(arr.tostring(order='F')) - with open('test.bin', 'rb') as fobj: + with open("test.bin", "wb") as fobj: + fobj.write(arr.tostring(order="F")) + with open("test.bin", "rb") as fobj: res = array_from_file(shape, dt, fobj) assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == 'c' - with open('test.bin', 'rb') as fobj: + assert res.mode == "c" + with open("test.bin", "rb") as fobj: res = array_from_file(shape, dt, fobj, mmap=True) assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == 'c' - with open('test.bin', 'rb') as fobj: - res = array_from_file(shape, dt, fobj, mmap='c') + assert res.mode == "c" + with open("test.bin", "rb") as fobj: + res = array_from_file(shape, dt, fobj, mmap="c") assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == 'c' - with open('test.bin', 'rb') as fobj: - res = array_from_file(shape, dt, fobj, mmap='r') + assert res.mode == "c" + with open("test.bin", "rb") as fobj: + res = array_from_file(shape, dt, fobj, mmap="r") assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == 'r' - with open('test.bin', 'rb+') as fobj: - res = array_from_file(shape, dt, fobj, mmap='r+') + assert res.mode == "r" + with open("test.bin", "rb+") as fobj: + res = array_from_file(shape, dt, fobj, mmap="r+") assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == 'r+' - with open('test.bin', 'rb') as fobj: + assert res.mode == "r+" + with open("test.bin", "rb") as fobj: res = array_from_file(shape, dt, fobj, mmap=False) assert_array_equal(res, arr) assert not isinstance(res, np.memmap) - with open('test.bin', 'rb') as fobj: + with open("test.bin", "rb") as fobj: with pytest.raises(ValueError): - array_from_file(shape, dt, fobj, mmap='p') + array_from_file(shape, dt, fobj, mmap="p") def buf_chk(in_arr, out_buf, in_buf, offset): - ''' Write contents of in_arr into fileobj, read back, check same ''' - instr = b' ' * offset + in_arr.tostring(order='F') + """ Write contents of in_arr 
into fileobj, read back, check same """ + instr = b" " * offset + in_arr.tostring(order="F") out_buf.write(instr) out_buf.flush() if in_buf is None: # we're using in_buf from out_buf out_buf.seek(0) in_buf = out_buf - arr = array_from_file( - in_arr.shape, - in_arr.dtype, - in_buf, - offset) + arr = array_from_file(in_arr.shape, in_arr.dtype, in_buf, offset) return np.allclose(in_arr, arr) @@ -232,14 +234,13 @@ def test_array_from_file_openers(): dtype = np.dtype(np.float32) in_arr = np.arange(24, dtype=dtype).reshape(shape) with InTemporaryDirectory(): - for ext, offset in itertools.product(('', '.gz', '.bz2'), - (0, 5, 10)): - fname = 'test.bin' + ext - with Opener(fname, 'wb') as out_buf: + for ext, offset in itertools.product(("", ".gz", ".bz2"), (0, 5, 10)): + fname = "test.bin" + ext + with Opener(fname, "wb") as out_buf: if offset != 0: # avoid https://bugs.python.org/issue16828 - out_buf.write(b' ' * offset) - out_buf.write(in_arr.tostring(order='F')) - with Opener(fname, 'rb') as in_buf: + out_buf.write(b" " * offset) + out_buf.write(in_arr.tostring(order="F")) + with Opener(fname, "rb") as in_buf: out_arr = array_from_file(shape, dtype, in_buf, offset) assert_array_almost_equal(in_arr, out_arr) # Delete object holding onto file for Windows @@ -251,25 +252,26 @@ def test_array_from_file_reread(): # This is the live check for the generic checks in # test_fobj_string_assumptions offset = 9 - fname = 'test.bin' + fname = "test.bin" with InTemporaryDirectory(): for shape, opener, dtt, order in itertools.product( - ((64,), (64, 65), (64, 65, 66)), - (open, gzip.open, bz2.BZ2File, BytesIO), - (np.int16, np.float32), - ('F', 'C')): + ((64,), (64, 65), (64, 65, 66)), + (open, gzip.open, bz2.BZ2File, BytesIO), + (np.int16, np.float32), + ("F", "C"), + ): n_els = np.prod(shape) in_arr = np.arange(n_els, dtype=dtt).reshape(shape) - is_bio = hasattr(opener, 'getvalue') + is_bio = hasattr(opener, "getvalue") # Write array to file - fobj_w = opener() if is_bio else opener(fname, 'wb') - fobj_w.write(b' ' * offset) + fobj_w = opener() if is_bio else opener(fname, "wb") + fobj_w.write(b" " * offset) fobj_w.write(in_arr.tostring(order=order)) if is_bio: fobj_r = fobj_w else: fobj_w.close() - fobj_r = opener(fname, 'rb') + fobj_r = opener(fname, "rb") # Read back from file try: out_arr = array_from_file(shape, dtt, fobj_r, offset, order) @@ -291,14 +293,14 @@ def test_array_to_file(): str_io = BytesIO() for tp in (np.uint64, np.float, np.complex): dt = np.dtype(tp) - for code in '<>': + for code in "<>": ndt = dt.newbyteorder(code) for allow_intercept in (True, False): with suppress_warnings(): # deprecated - scale, intercept, mn, mx = \ - calculate_scale(arr, ndt, allow_intercept) - data_back = write_return(arr, str_io, ndt, - 0, intercept, scale) + scale, intercept, mn, mx = calculate_scale( + arr, ndt, allow_intercept + ) + data_back = write_return(arr, str_io, ndt, 0, intercept, scale) assert_array_almost_equal(arr, data_back) # Test array-like str_io = BytesIO() @@ -324,13 +326,20 @@ def test_a2f_upscale(): # Test values discovered from stress testing. The largish value (2**115) # overflows to inf after the intercept is subtracted, using float32 as the # working precision. The difference between inf and this value is lost. 
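# A minimal standalone sketch (separate from the patch) of the overflow
# described above, assuming only numpy; np.finfo(np.float32).min stands in
# for info['min']:
import numpy as np

val = np.float32(2 ** 115)                    # ~4.15e34, fits in float32
inter = np.finfo(np.float32).min              # ~ -3.4028e38
with np.errstate(over='ignore'):
    print(val - inter)                        # inf: result exceeds float32 max
print(np.float64(val) - np.float64(inter))    # finite once upcast to float64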
- arr = np.array([[info['min'], 2**115, info['max']]], dtype=np.float32) - slope = np.float32(2**121) - inter = info['min'] + arr = np.array([[info["min"], 2 ** 115, info["max"]]], dtype=np.float32) + slope = np.float32(2 ** 121) + inter = info["min"] str_io = BytesIO() # We need to provide mn, mx for function to be able to calculate upcasting - array_to_file(arr, str_io, np.uint8, intercept=inter, divslope=slope, - mn=info['min'], mx=info['max']) + array_to_file( + arr, + str_io, + np.uint8, + intercept=inter, + divslope=slope, + mn=info["min"], + mx=info["max"], + ) raw = array_from_file(arr.shape, np.uint8, str_io) back = apply_read_scaling(raw, slope, inter) top = back - arr @@ -345,11 +354,11 @@ def test_a2f_min_max(): for out_dt in (np.float32, np.int8): arr = np.arange(4, dtype=in_dt) # min thresholding - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_back = write_return(arr, str_io, out_dt, 0, 0, 1, 1) assert_array_equal(data_back, [1, 1, 2, 3]) # max thresholding - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_back = write_return(arr, str_io, out_dt, 0, 0, 1, None, 2) assert_array_equal(data_back, [0, 1, 2, 2]) # min max thresholding @@ -374,13 +383,13 @@ def test_a2f_order(): arr = np.array([0.0, 1.0, 2.0]) str_io = BytesIO() # order makes no difference in 1D case - data_back = write_return(arr, str_io, ndt, order='C') + data_back = write_return(arr, str_io, ndt, order="C") assert_array_equal(data_back, [0.0, 1.0, 2.0]) # but does in the 2D case arr = np.array([[0.0, 1.0], [2.0, 3.0]]) - data_back = write_return(arr, str_io, ndt, order='F') + data_back = write_return(arr, str_io, ndt, order="F") assert_array_equal(data_back, arr) - data_back = write_return(arr, str_io, ndt, order='C') + data_back = write_return(arr, str_io, ndt, order="C") assert_array_equal(data_back, arr.T) @@ -394,12 +403,12 @@ def test_a2f_nan2zero(): # True is the default, but just to show it's possible data_back = write_return(arr, str_io, ndt, nan2zero=True) assert_array_equal(data_back, arr) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_back = write_return(arr, str_io, np.int64, nan2zero=True) assert_array_equal(data_back, [[0, 0], [0, 0]]) # otherwise things get a bit weird; tidied here # How weird? 
Look at arr.astype(np.int64) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_back = write_return(arr, str_io, np.int64, nan2zero=False) assert_array_equal(data_back, arr.astype(np.int64)) @@ -418,18 +427,16 @@ def test_a2f_nan2zero_scaling(): # Array values including zero before scaling but not after bio = BytesIO() for in_dt, out_dt, zero_in, inter in itertools.product( - FLOAT_TYPES, - IUINT_TYPES, - (True, False), - (0, -100)): + FLOAT_TYPES, IUINT_TYPES, (True, False), (0, -100) + ): in_info = np.finfo(in_dt) out_info = np.iinfo(out_dt) - mx = min(in_info.max, out_info.max * 2., 2**32) + inter + mx = min(in_info.max, out_info.max * 2.0, 2 ** 32) + inter mn = 0 if zero_in or inter else 100 vals = [np.nan] + [mn, mx] nan_arr = np.array(vals, dtype=in_dt) zero_arr = np.nan_to_num(nan_arr) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): back_nan = write_return(nan_arr, bio, np.int64, intercept=inter) back_zero = write_return(zero_arr, bio, np.int64, intercept=inter) assert_array_equal(back_nan, back_zero) @@ -439,7 +446,7 @@ def test_a2f_offset(): # check that non-zero file offset works arr = np.array([[0.0, 1.0], [2.0, 3.0]]) str_io = BytesIO() - str_io.write(b'a' * 42) + str_io.write(b"a" * 42) array_to_file(arr, str_io, np.float, 42) data_back = array_from_file(arr.shape, np.float, str_io, 42) assert_array_equal(data_back, arr.astype(np.float)) @@ -481,22 +488,30 @@ def test_a2f_zeros(): def test_a2f_big_scalers(): # Check that clip works even for overflowing scalers / data info = type_info(np.float32) - arr = np.array([info['min'], 0, info['max']], dtype=np.float32) + arr = np.array([info["min"], 0, info["max"]], dtype=np.float32) str_io = BytesIO() # Intercept causes overflow - does routine scale correctly? # We check whether the routine correctly clips extreme values. # We need nan2zero=False because we can't represent 0 in the input, given # the scaling and the output range. with suppress_warnings(): # overflow - array_to_file(arr, str_io, np.int8, intercept=np.float32(2**120), - nan2zero=False) + array_to_file( + arr, str_io, np.int8, intercept=np.float32(2 ** 120), nan2zero=False + ) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, -128, 127]) # Scales also if mx, mn specified? Same notes and complaints as for the test # above. str_io.seek(0) - array_to_file(arr, str_io, np.int8, mn=info['min'], mx=info['max'], - intercept=np.float32(2**120), nan2zero=False) + array_to_file( + arr, + str_io, + np.int8, + mn=info["min"], + mx=info["max"], + intercept=np.float32(2 ** 120), + nan2zero=False, + ) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, -128, 127]) # And if slope causes overflow? @@ -507,8 +522,9 @@ def test_a2f_big_scalers(): assert_array_equal(data_back, [-128, 0, 127]) # with mn, mx specified? 
str_io.seek(0) - array_to_file(arr, str_io, np.int8, mn=info['min'], mx=info['max'], - divslope=np.float32(0.5)) + array_to_file( + arr, str_io, np.int8, mn=info["min"], mx=info["max"], divslope=np.float32(0.5) + ) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, 0, 127]) @@ -518,13 +534,13 @@ def test_a2f_int_scaling(): arr = np.array([0, 1, 128, 255], dtype=np.uint8) fobj = BytesIO() back_arr = write_return(arr, fobj, np.uint8, intercept=1) - assert_array_equal(back_arr, np.clip(arr - 1., 0, 255)) + assert_array_equal(back_arr, np.clip(arr - 1.0, 0, 255)) back_arr = write_return(arr, fobj, np.uint8, divslope=2) - assert_array_equal(back_arr, np.round(np.clip(arr / 2., 0, 255))) + assert_array_equal(back_arr, np.round(np.clip(arr / 2.0, 0, 255))) back_arr = write_return(arr, fobj, np.uint8, intercept=1, divslope=2) - assert_array_equal(back_arr, np.round(np.clip((arr - 1.) / 2., 0, 255))) + assert_array_equal(back_arr, np.round(np.clip((arr - 1.0) / 2.0, 0, 255))) back_arr = write_return(arr, fobj, np.int16, intercept=1, divslope=2) - assert_array_equal(back_arr, np.round((arr - 1.) / 2.)) + assert_array_equal(back_arr, np.round((arr - 1.0) / 2.0)) def test_a2f_scaled_unscaled(): @@ -532,10 +548,8 @@ def test_a2f_scaled_unscaled(): # Test that valid arrays are all written correctly with and # without scaling fobj = BytesIO() for in_dtype, out_dtype, intercept, divslope in itertools.product( - NUMERIC_TYPES, - NUMERIC_TYPES, - (0, 0.5, -1, 1), - (1, 0.5, 2)): + NUMERIC_TYPES, NUMERIC_TYPES, (0, 0.5, -1, 1), (1, 0.5, 2) + ): mn_in, mx_in = _dt_min_max(in_dtype) nan_val = np.nan if in_dtype in CFLOAT_TYPES else 10 arr = np.array([mn_in, -1, 0, 1, mx_in, nan_val], dtype=in_dtype) mn_out, mx_out = _dt_min_max(out_dtype) # 0 when scaled to output will be: rint((0 - inter) / slope) nan_fill = np.rint((0 - intercept) / divslope) if out_dtype in IUINT_TYPES: nan_fill = np.round(nan_fill) # nan2zero will check whether 0 is scaled to a valid value in output - if (in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out): + if in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out: with pytest.raises(ValueError): - array_to_file(arr, - fobj, - out_dtype=out_dtype, - divslope=divslope, - intercept=intercept) + array_to_file( + arr, + fobj, + out_dtype=out_dtype, + divslope=divslope, + intercept=intercept, + ) continue with suppress_warnings(): - back_arr = write_return(arr, fobj, - out_dtype=out_dtype, - divslope=divslope, - intercept=intercept) + back_arr = write_return( + arr, fobj, out_dtype=out_dtype, divslope=divslope, intercept=intercept ) exp_back = arr.copy() - if (in_dtype in IUINT_TYPES and - out_dtype in IUINT_TYPES and - (intercept, divslope) == (0, 1)): + if ( + in_dtype in IUINT_TYPES + and out_dtype in IUINT_TYPES + and (intercept, divslope) == (0, 1) + ): # Direct iu to iu casting. # Need to clip if ranges not the same. # Use smaller of input, output range to avoid np.clip upcasting # the array because of large clip limits. 
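# A minimal standalone sketch (separate from the patch) of the np.clip
# upcast this comment guards against, assuming only numpy: clip limits of
# a wider dtype promote the result dtype.
import numpy as np

small = np.arange(5, dtype=np.int8)
clipped = np.clip(small, np.int16(-1000), np.int16(1000))
print(clipped.dtype)   # int16: the wider int16 limits upcast the int8 data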
if (mn_in, mx_in) != (mn_out, mx_out): - exp_back = np.clip(exp_back, - max(mn_in, mn_out), - min(mx_in, mx_out)) + exp_back = np.clip(exp_back, max(mn_in, mn_out), min(mx_in, mx_out)) else: # Need to deal with nans, casting to float, clipping if in_dtype in CFLOAT_TYPES and out_dtype in IUINT_TYPES: exp_back[np.isnan(exp_back)] = 0 @@ -579,8 +594,7 @@ def test_a2f_scaled_unscaled(): exp_back -= intercept if divslope != 1: exp_back /= divslope - if (exp_back.dtype.type in CFLOAT_TYPES and - out_dtype in IUINT_TYPES): + if exp_back.dtype.type in CFLOAT_TYPES and out_dtype in IUINT_TYPES: exp_back = np.round(exp_back).astype(float) exp_back = np.clip(exp_back, *shared_range(float, out_dtype)) exp_back = exp_back.astype(out_dtype) @@ -600,35 +614,28 @@ def test_a2f_nanpos(): def test_a2f_bad_scaling(): # Test that pathological scalers raise an error - NUMERICAL_TYPES = sum([np.sctypes[key] for key in ['int', - 'uint', - 'float', - 'complex']], - []) + NUMERICAL_TYPES = sum( + [np.sctypes[key] for key in ["int", "uint", "float", "complex"]], [] + ) for in_type, out_type, slope, inter in itertools.product( - NUMERICAL_TYPES, - NUMERICAL_TYPES, - (None, 1, 0, np.nan, -np.inf, np.inf), - (0, np.nan, -np.inf, np.inf)): + NUMERICAL_TYPES, + NUMERICAL_TYPES, + (None, 1, 0, np.nan, -np.inf, np.inf), + (0, np.nan, -np.inf, np.inf), + ): arr = np.ones((2,), dtype=in_type) fobj = BytesIO() if (slope, inter) == (1, 0): - assert_array_equal(arr, - write_return(arr, fobj, out_type, - intercept=inter, - divslope=slope)) + assert_array_equal( + arr, write_return(arr, fobj, out_type, intercept=inter, divslope=slope) + ) elif (slope, inter) == (None, 0): - assert_array_equal(0, - write_return(arr, fobj, out_type, - intercept=inter, - divslope=slope)) + assert_array_equal( + 0, write_return(arr, fobj, out_type, intercept=inter, divslope=slope) + ) else: with pytest.raises(ValueError): - array_to_file(arr, - fobj, - np.int8, - intercept=inter, - divslope=slope) + array_to_file(arr, fobj, np.int8, intercept=inter, divslope=slope) def test_a2f_nan2zero_range(): @@ -648,8 +655,9 @@ def test_a2f_nan2zero_range(): # Pushing zero outside the output data range does not generate error back_arr = write_return(arr_no_nan, fobj, np.int8, intercept=129, nan2zero=True) assert_array_equal([-128, -128, -128, -127], back_arr) - back_arr = write_return(arr_no_nan, fobj, np.int8, - intercept=257.1, divslope=2, nan2zero=True) + back_arr = write_return( + arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=True + ) assert_array_equal([-128, -128, -128, -128], back_arr) for dt in CFLOAT_TYPES: arr = np.array([-1, 0, 1, np.nan], dtype=dt) @@ -657,11 +665,9 @@ def test_a2f_nan2zero_range(): arr_no_nan = np.array([-1, 0, 1, 2], dtype=dt) # No errors from explicit thresholding # mn thresholding excluding zero - assert_array_equal([1, 1, 1, 0], - write_return(arr, fobj, np.int8, mn=1)) + assert_array_equal([1, 1, 1, 0], write_return(arr, fobj, np.int8, mn=1)) # mx thresholding excluding zero - assert_array_equal([-1, -1, -1, 0], - write_return(arr, fobj, np.int8, mx=-1)) + assert_array_equal([-1, -1, -1, 0], write_return(arr, fobj, np.int8, mx=-1)) # Errors from datatype threshold after scaling back_arr = write_return(arr, fobj, np.int8, intercept=128) assert_array_equal([-128, -128, -127, -128], back_arr) @@ -681,17 +687,18 @@ def test_a2f_nan2zero_range(): with pytest.raises(ValueError): write_return(arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2) # OK with nan2zero false - back_arr = write_return(arr, fobj, 
np.int8, - intercept=257.1, divslope=2, nan2zero=False) + back_arr = write_return( + arr, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=False + ) assert_array_equal([-128, -128, -128, nan_cast], back_arr) def test_a2f_non_numeric(): # Reminder that we may get structured dtypes - dt = np.dtype([('f1', 'f'), ('f2', 'i2')]) + dt = np.dtype([("f1", "f"), ("f2", "i2")]) arr = np.zeros((2,), dtype=dt) - arr['f1'] = 0.4, 0.6 - arr['f2'] = 10, 12 + arr["f1"] = 0.4, 0.6 + arr["f2"] = 10, 12 fobj = BytesIO() back_arr = write_return(arr, fobj, dt) assert_array_equal(back_arr, arr) @@ -732,17 +739,17 @@ def test_apply_scaling(): assert (f32_arr * f64(1)).dtype == np.float32 assert (f32_arr + f64(1)).dtype == np.float32 # The function does upcast though - ret=apply_read_scaling(np.float32(0), np.float64(2)) + ret = apply_read_scaling(np.float32(0), np.float64(2)) assert ret.dtype == np.float64 - ret=apply_read_scaling(np.float32(0), inter=np.float64(2)) + ret = apply_read_scaling(np.float32(0), inter=np.float64(2)) assert ret.dtype == np.float64 # Check integer inf upcast - big=f32(type_info(f32)['max']) + big = f32(type_info(f32)["max"]) # Normally this would not upcast assert (i16_arr * big).dtype == np.float32 # An equivalent case is a little hard to find for the intercept - nmant_32=type_info(np.float32)['nmant'] - big_delta=np.float32(2**(floor_log2(big) - nmant_32)) + nmant_32 = type_info(np.float32)["nmant"] + big_delta = np.float32(2 ** (floor_log2(big) - nmant_32)) assert (i16_arr * big_delta + big).dtype == np.float32 # Upcasting does occur with this routine assert apply_read_scaling(i16_arr, big).dtype == np.float64 @@ -756,15 +763,17 @@ def test_apply_scaling(): assert apply_read_scaling(np.int8(0), f32(-1e38), f32(0.0)).dtype == np.float64 # Non-zero intercept still generates floats assert_dt_equal(apply_read_scaling(i16_arr, 1.0, 1.0).dtype, float) - assert_dt_equal(apply_read_scaling( - np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float) - assert_dt_equal(apply_read_scaling( - np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float) + assert_dt_equal( + apply_read_scaling(np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float + ) + assert_dt_equal( + apply_read_scaling(np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float + ) def test_apply_read_scaling_ints(): # Test that apply_read_scaling copes with integer scaling inputs - arr=np.arange(10, dtype=np.int16) + arr = np.arange(10, dtype=np.int16) assert_array_equal(apply_read_scaling(arr, 1, 0), arr) assert_array_equal(apply_read_scaling(arr, 1, 1), arr + 1) assert_array_equal(apply_read_scaling(arr, 2, 1), arr * 2 + 1) @@ -772,7 +781,7 @@ def test_apply_read_scaling_ints(): def test_apply_read_scaling_nones(): # Check that we can pass None as slope and inter to apply read scaling - arr=np.arange(10, dtype=np.int16) + arr = np.arange(10, dtype=np.int16) assert_array_equal(apply_read_scaling(arr, None, None), arr) assert_array_equal(apply_read_scaling(arr, 2, None), arr * 2) assert_array_equal(apply_read_scaling(arr, None, 1), arr + 1) @@ -792,50 +801,52 @@ def test_working_type(): # need this because of the very confusing np.int32 != np.intp (on 32 bit). 
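# A minimal standalone sketch (separate from the patch) of the pitfall
# motivating the dtype-string comparison below, assuming only numpy:
# dtypes compare equal by description, while scalar-type identity is
# platform-dependent.
import numpy as np

print(np.dtype(np.int32).str)      # canonical string form, e.g. '<i4'
print(np.dtype('i4') == np.int32)  # True: comparison is by description
print(np.int32 is np.intp)         # platform-dependent, even when the
                                   # two types describe the same size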
def wt(*args, **kwargs): return np.dtype(working_type(*args, **kwargs)).str - d1=np.atleast_1d + + d1 = np.atleast_1d for in_type in NUMERIC_TYPES: - in_ts=np.dtype(in_type).str + in_ts = np.dtype(in_type).str assert wt(in_type) == in_ts assert wt(in_type, 1, 0) == in_ts assert wt(in_type, 1.0, 0.0) == in_ts - in_val=d1(in_type(0)) + in_val = d1(in_type(0)) for slope_type in NUMERIC_TYPES: - sl_val=slope_type(1) # no scaling, regardless of type + sl_val = slope_type(1) # no scaling, regardless of type assert wt(in_type, sl_val, 0.0) == in_ts - sl_val=slope_type(2) # actual scaling - out_val=in_val / d1(sl_val) + sl_val = slope_type(2) # actual scaling + out_val = in_val / d1(sl_val) assert wt(in_type, sl_val) == out_val.dtype.str for inter_type in NUMERIC_TYPES: - i_val=inter_type(0) # no scaling, regardless of type + i_val = inter_type(0) # no scaling, regardless of type assert wt(in_type, 1, i_val) == in_ts - i_val=inter_type(1) # actual scaling - out_val=in_val - d1(i_val) + i_val = inter_type(1) # actual scaling + out_val = in_val - d1(i_val) assert wt(in_type, 1, i_val) == out_val.dtype.str # Combine scaling and intercept - out_val=(in_val - d1(i_val)) / d1(sl_val) + out_val = (in_val - d1(i_val)) / d1(sl_val) assert wt(in_type, sl_val, i_val) == out_val.dtype.str # Confirm that type codes and dtypes work as well - f32s=np.dtype(np.float32).str - assert wt('f4', 1, 0) == f32s - assert wt(np.dtype('f4'), 1, 0) == f32s + f32s = np.dtype(np.float32).str + assert wt("f4", 1, 0) == f32s + assert wt(np.dtype("f4"), 1, 0) == f32s def test_better_float(): # Better float function def check_against(f1, f2): return f1 if FLOAT_TYPES.index(f1) >= FLOAT_TYPES.index(f2) else f2 + for first in FLOAT_TYPES: - for other in IUINT_TYPES + np.sctypes['complex']: + for other in IUINT_TYPES + np.sctypes["complex"]: assert better_float_of(first, other) == first assert better_float_of(other, first) == first - for other2 in IUINT_TYPES + np.sctypes['complex']: + for other2 in IUINT_TYPES + np.sctypes["complex"]: assert better_float_of(other, other2) == np.float32 assert better_float_of(other, other2, np.float64) == np.float64 for second in FLOAT_TYPES: assert better_float_of(first, second) == check_against(first, second) # Check codes and dtypes work - assert better_float_of('f4', 'f8', 'f4') == np.float64 - assert better_float_of('i4', 'i8', 'f8') == np.float64 + assert better_float_of("f4", "f8", "f4") == np.float64 + assert better_float_of("i4", "i8", "f8") == np.float64 def test_best_write_scale_ftype(): @@ -843,23 +854,29 @@ def test_best_write_scale_ftype(): # Types return better of (default, array type) unless scale overflows. 
# Return float type cannot be less capable than the input array type for dtt in IUINT_TYPES + FLOAT_TYPES: - arr=np.arange(10, dtype=dtt) + arr = np.arange(10, dtype=dtt) assert best_write_scale_ftype(arr, 1, 0) == better_float_of(dtt, np.float32) - assert best_write_scale_ftype(arr, 1, 0, np.float64) == better_float_of(dtt, np.float64) - assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of(dtt, np.float32) - assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of(dtt, np.float32) + assert best_write_scale_ftype(arr, 1, 0, np.float64) == better_float_of( + dtt, np.float64 + ) + assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of( + dtt, np.float32 + ) + assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of( + dtt, np.float32 + ) # Overflowing ints with scaling results in upcast - best_vals=((np.float32, np.float64),) + best_vals = ((np.float32, np.float64),) if np.longdouble in OK_FLOATS: best_vals += ((np.float64, np.longdouble),) for lower_t, higher_t in best_vals: # Information on this float - L_info=type_info(lower_t) - t_max=L_info['max'] - nmant=L_info['nmant'] # number of significand digits - big_delta=lower_t(2**(floor_log2(t_max) - nmant)) # delta below max + L_info = type_info(lower_t) + t_max = L_info["max"] + nmant = L_info["nmant"] # number of significand digits + big_delta = lower_t(2 ** (floor_log2(t_max) - nmant)) # delta below max # Even large values that don't overflow don't change output - arr=np.array([0, t_max], dtype=lower_t) + arr = np.array([0, t_max], dtype=lower_t) assert best_write_scale_ftype(arr, 1, 0) == lower_t # Scaling > 1 reduces output values, so no upcast needed assert best_write_scale_ftype(arr, lower_t(1.01), 0) == lower_t @@ -869,28 +886,29 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, 1, -big_delta / 2.01) == lower_t assert best_write_scale_ftype(arr, 1, -big_delta / 2.0) == higher_t # With infs already in input, default type returns - arr[0]=np.inf + arr[0] = np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t - arr[0]=-np.inf + arr[0] = -np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t def test_can_cast(): - tests=((np.float32, np.float32, True, True, True), - (np.float64, np.float32, True, True, True), - (np.complex128, np.float32, False, False, False), - (np.float32, np.complex128, True, True, True), - (np.float32, np.uint8, False, True, True), - (np.uint32, np.complex128, True, True, True), - (np.int64, np.float32, True, True, True), - (np.complex128, np.int16, False, False, False), - (np.float32, np.int16, False, True, True), - (np.uint8, np.int16, True, True, True), - (np.uint16, np.int16, False, True, True), - (np.int16, np.uint16, False, False, True), - (np.int8, np.uint16, False, False, True), - (np.uint16, np.uint8, False, True, True), - ) + tests = ( + (np.float32, np.float32, True, True, True), + (np.float64, np.float32, True, True, True), + (np.complex128, np.float32, False, False, False), + (np.float32, np.complex128, True, True, True), + (np.float32, np.uint8, False, True, True), + (np.uint32, np.complex128, True, True, True), + (np.int64, np.float32, True, True, True), + (np.complex128, np.int16, False, False, False), + (np.float32, np.int16, False, True, True), + (np.uint8, np.int16, True, True, True), + (np.uint16, np.int16, False, True, True), + (np.int16, np.uint16, False, False, True), + (np.int8, np.uint16, False, False, True), + (np.uint16, np.uint8, False, True, True), + ) for intype, 
outtype, def_res, scale_res, all_res in tests: assert def_res == can_cast(intype, outtype) assert scale_res == can_cast(intype, outtype, False, True) @@ -898,54 +916,54 @@ def test_can_cast(): def test_write_zeros(): - bio=BytesIO() + bio = BytesIO() write_zeros(bio, 10000) - assert bio.getvalue() == b'\x00' * 10000 + assert bio.getvalue() == b"\x00" * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 10000, 256) - assert bio.getvalue() == b'\x00' * 10000 + assert bio.getvalue() == b"\x00" * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 200, 256) - assert bio.getvalue() == b'\x00' * 200 + assert bio.getvalue() == b"\x00" * 200 def test_seek_tell(): # Test seek tell routine - bio=BytesIO() - in_files=bio, 'test.bin', 'test.gz', 'test.bz2' - start=10 - end=100 - diff=end - start - tail=7 + bio = BytesIO() + in_files = bio, "test.bin", "test.gz", "test.bz2" + start = 10 + end = 100 + diff = end - start + tail = 7 with InTemporaryDirectory(): for in_file, write0 in itertools.product(in_files, (False, True)): - st=functools.partial(seek_tell, write0=write0) + st = functools.partial(seek_tell, write0=write0) bio.seek(0) # First write the file - with ImageOpener(in_file, 'wb') as fobj: + with ImageOpener(in_file, "wb") as fobj: assert fobj.tell() == 0 # already at position - OK st(fobj, 0) assert fobj.tell() == 0 # Move position by writing - fobj.write(b'\x01' * start) + fobj.write(b"\x01" * start) assert fobj.tell() == start # Files other than BZ2Files can seek forward on write, leaving # zeros in their wake. BZ2Files can't seek when writing, unless # we enable the write0 flag to seek_tell - if not write0 and in_file == 'test.bz2': # Can't seek write in bz2 + if not write0 and in_file == "test.bz2": # Can't seek write in bz2 # write the zeros by hand for the read test below - fobj.write(b'\x00' * diff) + fobj.write(b"\x00" * diff) else: st(fobj, end) assert fobj.tell() == end # Write tail - fobj.write(b'\x02' * tail) + fobj.write(b"\x02" * tail) bio.seek(0) # Now read back the file testing seek_tell in reading mode - with ImageOpener(in_file, 'rb') as fobj: + with ImageOpener(in_file, "rb") as fobj: assert fobj.tell() == 0 st(fobj, 0) assert fobj.tell() == 0 @@ -957,42 +975,42 @@ def test_seek_tell(): st(fobj, 0) bio.seek(0) # Check we have the expected written output - with ImageOpener(in_file, 'rb') as fobj: - assert fobj.read() == b'\x01' * start + b'\x00' * diff + b'\x02' * tail - for in_file in ('test2.gz', 'test2.bz2'): + with ImageOpener(in_file, "rb") as fobj: + assert fobj.read() == b"\x01" * start + b"\x00" * diff + b"\x02" * tail + for in_file in ("test2.gz", "test2.bz2"): # Check failure of write seek backwards - with ImageOpener(in_file, 'wb') as fobj: - fobj.write(b'g' * 10) + with ImageOpener(in_file, "wb") as fobj: + fobj.write(b"g" * 10) assert fobj.tell() == 10 seek_tell(fobj, 10) assert fobj.tell() == 10 with pytest.raises(IOError): seek_tell(fobj, 5) # Make sure read seeks don't affect file - with ImageOpener(in_file, 'rb') as fobj: + with ImageOpener(in_file, "rb") as fobj: seek_tell(fobj, 10) seek_tell(fobj, 0) - with ImageOpener(in_file, 'rb') as fobj: - assert fobj.read() == b'g' * 10 + with ImageOpener(in_file, "rb") as fobj: + assert fobj.read() == b"g" * 10 def test_seek_tell_logic(): # Test logic of seek_tell write0 with dummy class # Seek works? 
OK - bio=BytesIO() + bio = BytesIO() seek_tell(bio, 10) assert bio.tell() == 10 class BabyBio(BytesIO): - def seek(self, *args): raise IOError() - bio=BabyBio() + + bio = BabyBio() # Fresh fileobj, position 0, can't seek - error with pytest.raises(IOError): bio.seek(10) # Put fileobj in correct position by writing - ZEROB=b'\x00' + ZEROB = b"\x00" bio.write(ZEROB * 10) seek_tell(bio, 10) # already there, nothing to do assert bio.tell() == 10 @@ -1007,110 +1025,123 @@ def seek(self, *args): def test_fname_ext_ul_case(): # Get filename ignoring the case of the filename extension with InTemporaryDirectory(): - with open('afile.TXT', 'wt') as fobj: - fobj.write('Interesting information') + with open("afile.TXT", "wt") as fobj: + fobj.write("Interesting information") # OSX usually has case-insensitive file systems; Windows also - os_cares_case=not exists('afile.txt') - with open('bfile.txt', 'wt') as fobj: - fobj.write('More interesting information') + os_cares_case = not exists("afile.txt") + with open("bfile.txt", "wt") as fobj: + fobj.write("More interesting information") # If there is no file, the case doesn't change - assert fname_ext_ul_case('nofile.txt') == 'nofile.txt' - assert fname_ext_ul_case('nofile.TXT') == 'nofile.TXT' + assert fname_ext_ul_case("nofile.txt") == "nofile.txt" + assert fname_ext_ul_case("nofile.TXT") == "nofile.TXT" # If there is a file, accept upper or lower case for ext if os_cares_case: - assert fname_ext_ul_case('afile.txt') == 'afile.TXT' - assert fname_ext_ul_case('bfile.TXT') == 'bfile.txt' + assert fname_ext_ul_case("afile.txt") == "afile.TXT" + assert fname_ext_ul_case("bfile.TXT") == "bfile.txt" else: - assert fname_ext_ul_case('afile.txt') == 'afile.txt' - assert fname_ext_ul_case('bfile.TXT') == 'bfile.TXT' - assert fname_ext_ul_case('afile.TXT') == 'afile.TXT' - assert fname_ext_ul_case('bfile.txt') == 'bfile.txt' + assert fname_ext_ul_case("afile.txt") == "afile.txt" + assert fname_ext_ul_case("bfile.TXT") == "bfile.TXT" + assert fname_ext_ul_case("afile.TXT") == "afile.TXT" + assert fname_ext_ul_case("bfile.txt") == "bfile.txt" # Not mixed case though - assert fname_ext_ul_case('afile.TxT') == 'afile.TxT' + assert fname_ext_ul_case("afile.TxT") == "afile.TxT" def test_allopen(): # This import into volumeutils is for compatibility. The code is the # ``openers`` module. 
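# A minimal standalone sketch (separate from the patch) of the
# non-deprecated spelling of the calls exercised below, assuming nibabel
# is importable: allopen() is the backwards-compatibility alias for the
# Opener class, which dispatches on .gz / .bz2 / plain paths.
from nibabel.openers import Opener

with Opener(__file__, 'rb') as fobj:
    first_bytes = fobj.read(42)   # same interface for compressed files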
with clear_and_catch_warnings() as w: - warnings.filterwarnings('once', category=DeprecationWarning) + warnings.filterwarnings("once", category=DeprecationWarning) # Test default mode is 'rb' - fobj=allopen(__file__) + fobj = allopen(__file__) # Check we got the deprecation warning assert len(w) == 1 - assert fobj.mode == 'rb' + assert fobj.mode == "rb" # That we can set it - fobj=allopen(__file__, 'r') - assert fobj.mode == 'r' + fobj = allopen(__file__, "r") + assert fobj.mode == "r" # with keyword arguments - fobj=allopen(__file__, mode='r') - assert fobj.mode == 'r' + fobj = allopen(__file__, mode="r") + assert fobj.mode == "r" # fileobj returns fileobj - msg=b'tiddle pom' - sobj=BytesIO(msg) - fobj=allopen(sobj) + msg = b"tiddle pom" + sobj = BytesIO(msg) + fobj = allopen(sobj) assert fobj.read() == msg # mode is gently ignored - fobj=allopen(sobj, mode='r') + fobj = allopen(sobj, mode="r") def test_allopen_compresslevel(): # We can set the default compression level with the module global # Get some data to compress - with open(__file__, 'rb') as fobj: - my_self=fobj.read() + with open(__file__, "rb") as fobj: + my_self = fobj.read() # Prepare loop - fname='test.gz' - sizes={} + fname = "test.gz" + sizes = {} # Stash module global from .. import volumeutils as vu - original_compress_level=vu.default_compresslevel + + original_compress_level = vu.default_compresslevel assert original_compress_level == 1 try: with InTemporaryDirectory(): - for compresslevel in ('default', 1, 9): - if compresslevel != 'default': - vu.default_compresslevel=compresslevel + for compresslevel in ("default", 1, 9): + if compresslevel != "default": + vu.default_compresslevel = compresslevel with warnings.catch_warnings(): warnings.simplefilter("ignore") - with allopen(fname, 'wb') as fobj: + with allopen(fname, "wb") as fobj: fobj.write(my_self) - with open(fname, 'rb') as fobj: - my_selves_smaller=fobj.read() - sizes[compresslevel]=len(my_selves_smaller) - assert sizes['default'] == sizes[1] + with open(fname, "rb") as fobj: + my_selves_smaller = fobj.read() + sizes[compresslevel] = len(my_selves_smaller) + assert sizes["default"] == sizes[1] assert sizes[1] > sizes[9] finally: - vu.default_compresslevel=original_compress_level + vu.default_compresslevel = original_compress_level def test_shape_zoom_affine(): - shape=(3, 5, 7) - zooms=(3, 2, 1) - res=shape_zoom_affine(shape, zooms) - exp=np.array([[-3., 0., 0., 3.], - [0., 2., 0., -4.], - [0., 0., 1., -3.], - [0., 0., 0., 1.]]) + shape = (3, 5, 7) + zooms = (3, 2, 1) + res = shape_zoom_affine(shape, zooms) + exp = np.array( + [ + [-3.0, 0.0, 0.0, 3.0], + [0.0, 2.0, 0.0, -4.0], + [0.0, 0.0, 1.0, -3.0], + [0.0, 0.0, 0.0, 1.0], + ] + ) assert_array_almost_equal(res, exp) - res=shape_zoom_affine((3, 5), (3, 2)) - exp=np.array([[-3., 0., 0., 3.], - [0., 2., 0., -4.], - [0., 0., 1., -0.], - [0., 0., 0., 1.]]) + res = shape_zoom_affine((3, 5), (3, 2)) + exp = np.array( + [ + [-3.0, 0.0, 0.0, 3.0], + [0.0, 2.0, 0.0, -4.0], + [0.0, 0.0, 1.0, -0.0], + [0.0, 0.0, 0.0, 1.0], + ] + ) assert_array_almost_equal(res, exp) - res=shape_zoom_affine(shape, zooms, False) - exp=np.array([[3., 0., 0., -3.], - [0., 2., 0., -4.], - [0., 0., 1., -3.], - [0., 0., 0., 1.]]) + res = shape_zoom_affine(shape, zooms, False) + exp = np.array( + [ + [3.0, 0.0, 0.0, -3.0], + [0.0, 2.0, 0.0, -4.0], + [0.0, 0.0, 1.0, -3.0], + [0.0, 0.0, 0.0, 1.0], + ] + ) assert_array_almost_equal(res, exp) def test_rec2dict(): - r=np.zeros((), dtype=[('x', 'i4'), ('s', 'S10')]) - d=rec2dict(r) - assert d == 
{'x': 0, 's': b''} + r = np.zeros((), dtype=[("x", "i4"), ("s", "S10")]) + d = rec2dict(r) + assert d == {"x": 0, "s": b""} def test_dtypes(): @@ -1121,127 +1152,156 @@ def test_dtypes(): # In [10]: dtype(' Date: Mon, 11 Nov 2019 16:08:53 -0500 Subject: [PATCH 372/689] fixed spacing --- nibabel/tests/test_volumeutils.py | 65 +++++++------------------------ 1 file changed, 14 insertions(+), 51 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 34c2196b93..9f7bda3350 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -99,9 +99,7 @@ def make_array(n, bytes): # Check whether file, gzip file, bz2 file reread memory from cache fname = "test.bin" with InTemporaryDirectory(): - for n, opener in itertools.product( - (256, 1024, 2560, 25600), (open, gzip.open, BZ2File) - ): + for n, opener in itertools.product((256, 1024, 2560, 25600), (open, gzip.open, BZ2File)): in_arr = np.arange(n, dtype=dtype) # Write array to file fobj_w = opener(fname, "wb") @@ -297,9 +295,7 @@ def test_array_to_file(): ndt = dt.newbyteorder(code) for allow_intercept in (True, False): with suppress_warnings(): # deprecated - scale, intercept, mn, mx = calculate_scale( - arr, ndt, allow_intercept - ) + scale, intercept, mn, mx = calculate_scale(arr, ndt, allow_intercept) data_back = write_return(arr, str_io, ndt, 0, intercept, scale) assert_array_almost_equal(arr, data_back) # Test array-like @@ -332,13 +328,7 @@ def test_a2f_upscale(): str_io = BytesIO() # We need to provide mn, mx for function to be able to calculate upcasting array_to_file( - arr, - str_io, - np.uint8, - intercept=inter, - divslope=slope, - mn=info["min"], - mx=info["max"], + arr, str_io, np.uint8, intercept=inter, divslope=slope, mn=info["min"], mx=info["max"], ) raw = array_from_file(arr.shape, np.uint8, str_io) back = apply_read_scaling(raw, slope, inter) @@ -495,9 +485,7 @@ def test_a2f_big_scalers(): # We need nan2zero=False because we can't represent 0 in the input, given # the scaling and the output range. with suppress_warnings(): # overflow - array_to_file( - arr, str_io, np.int8, intercept=np.float32(2 ** 120), nan2zero=False - ) + array_to_file(arr, str_io, np.int8, intercept=np.float32(2 ** 120), nan2zero=False) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, -128, 127]) # Scales also if mx, mn specified? Same notes and complaints as for the test @@ -522,9 +510,7 @@ def test_a2f_big_scalers(): assert_array_equal(data_back, [-128, 0, 127]) # with mn, mx specified? 
str_io.seek(0) - array_to_file( - arr, str_io, np.int8, mn=info["min"], mx=info["max"], divslope=np.float32(0.5) - ) + array_to_file(arr, str_io, np.int8, mn=info["min"], mx=info["max"], divslope=np.float32(0.5)) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, 0, 127]) @@ -562,11 +548,7 @@ def test_a2f_scaled_unscaled(): if in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out: with pytest.raises(ValueError): array_to_file( - arr, - fobj, - out_dtype=out_dtype, - divslope=divslope, - intercept=intercept, + arr, fobj, out_dtype=out_dtype, divslope=divslope, intercept=intercept, ) continue with suppress_warnings(): @@ -614,9 +596,7 @@ def test_a2f_nanpos(): def test_a2f_bad_scaling(): # Test that pathological scalers raise an error - NUMERICAL_TYPES = sum( - [np.sctypes[key] for key in ["int", "uint", "float", "complex"]], [] - ) + NUMERICAL_TYPES = sum([np.sctypes[key] for key in ["int", "uint", "float", "complex"]], []) for in_type, out_type, slope, inter in itertools.product( NUMERICAL_TYPES, NUMERICAL_TYPES, @@ -687,9 +667,7 @@ def test_a2f_nan2zero_range(): with pytest.raises(ValueError): write_return(arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2) # OK with nan2zero false - back_arr = write_return( - arr, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=False - ) + back_arr = write_return(arr, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=False) assert_array_equal([-128, -128, -128, nan_cast], back_arr) @@ -763,12 +741,8 @@ def test_apply_scaling(): assert apply_read_scaling(np.int8(0), f32(-1e38), f32(0.0)).dtype == np.float64 # Non-zero intercept still generates floats assert_dt_equal(apply_read_scaling(i16_arr, 1.0, 1.0).dtype, float) - assert_dt_equal( - apply_read_scaling(np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float - ) - assert_dt_equal( - apply_read_scaling(np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float - ) + assert_dt_equal(apply_read_scaling(np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float) + assert_dt_equal(apply_read_scaling(np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float) def test_apply_read_scaling_ints(): @@ -856,15 +830,9 @@ def test_best_write_scale_ftype(): for dtt in IUINT_TYPES + FLOAT_TYPES: arr = np.arange(10, dtype=dtt) assert best_write_scale_ftype(arr, 1, 0) == better_float_of(dtt, np.float32) - assert best_write_scale_ftype(arr, 1, 0, np.float64) == better_float_of( - dtt, np.float64 - ) - assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of( - dtt, np.float32 - ) - assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of( - dtt, np.float32 - ) + assert best_write_scale_ftype(arr, 1, 0, np.float64) == better_float_of(dtt, np.float64) + assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of(dtt, np.float32) + assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of(dtt, np.float32) # Overflowing ints with scaling results in upcast best_vals = ((np.float32, np.float64),) if np.longdouble in OK_FLOATS: @@ -1248,12 +1216,7 @@ def assert_rt( # check defense against modifying data in-place for in_cast, pre_clips, inter, slope, post_clips, nan_fill in itp( - (None, np.float32), - (None, (-1, 25)), - (0.0, 1.0), - (1.0, 0.5), - (None, (-2, 49)), - (None, 1), + (None, np.float32), (None, (-1, 25)), (0.0, 1.0), (1.0, 0.5), (None, (-2, 49)), (None, 1), ): data = np.arange(24).astype(np.float32) assert_rt( From 67eb65efe22127d8714ea972baadba22ca712363 Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: 
Mon, 11 Nov 2019 16:14:19 -0500 Subject: [PATCH 373/689] fixed spacing --- nibabel/tests/test_volumeutils.py | 32 ++++--------------------------- 1 file changed, 4 insertions(+), 28 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 9f7bda3350..d2434dc331 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -1076,32 +1076,17 @@ def test_shape_zoom_affine(): zooms = (3, 2, 1) res = shape_zoom_affine(shape, zooms) exp = np.array( - [ - [-3.0, 0.0, 0.0, 3.0], - [0.0, 2.0, 0.0, -4.0], - [0.0, 0.0, 1.0, -3.0], - [0.0, 0.0, 0.0, 1.0], - ] + [[-3.0, 0.0, 0.0, 3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -3.0], [0.0, 0.0, 0.0, 1.0],] ) assert_array_almost_equal(res, exp) res = shape_zoom_affine((3, 5), (3, 2)) exp = np.array( - [ - [-3.0, 0.0, 0.0, 3.0], - [0.0, 2.0, 0.0, -4.0], - [0.0, 0.0, 1.0, -0.0], - [0.0, 0.0, 0.0, 1.0], - ] + [[-3.0, 0.0, 0.0, 3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -0.0], [0.0, 0.0, 0.0, 1.0],] ) assert_array_almost_equal(res, exp) res = shape_zoom_affine(shape, zooms, False) exp = np.array( - [ - [3.0, 0.0, 0.0, -3.0], - [0.0, 2.0, 0.0, -4.0], - [0.0, 0.0, 1.0, -3.0], - [0.0, 0.0, 0.0, 1.0], - ] + [[3.0, 0.0, 0.0, -3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -3.0], [0.0, 0.0, 0.0, 1.0],] ) assert_array_almost_equal(res, exp) @@ -1179,16 +1164,7 @@ def assert_rt( if have_nans and nan_fill is None and not out_dtype.type == "f": raise ValueError("Cannot handle this case") _write_data( - to_write, - sio, - out_dtype, - order, - in_cast, - pre_clips, - inter, - slope, - post_clips, - nan_fill, + to_write, sio, out_dtype, order, in_cast, pre_clips, inter, slope, post_clips, nan_fill, ) arr = np.ndarray(shape, out_dtype, buffer=sio.getvalue(), order=order) expected = to_write.copy() From d6aa52dbe08961b4afd78f1c84bcbf823aa30632 Mon Sep 17 00:00:00 2001 From: Chris Gorgolewski Date: Mon, 11 Nov 2019 16:28:56 -0500 Subject: [PATCH 374/689] fixed spacing, revert --- nibabel/tests/test_volumeutils.py | 697 +++++++++++++++--------------- 1 file changed, 349 insertions(+), 348 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index d2434dc331..19e5f3bd27 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -""" Test for volumeutils module """ +''' Test for volumeutils module ''' import os from os.path import exists @@ -27,60 +27,54 @@ from ..tmpdirs import InTemporaryDirectory from ..openers import ImageOpener from .. 
import volumeutils -from ..volumeutils import ( - array_from_file, - _is_compressed_fobj, - array_to_file, - allopen, # for backwards compatibility - fname_ext_ul_case, - calculate_scale, - can_cast, - write_zeros, - seek_tell, - apply_read_scaling, - working_type, - best_write_scale_ftype, - better_float_of, - int_scinter_ftype, - make_dt_codes, - native_code, - shape_zoom_affine, - rec2dict, - _dt_min_max, - _write_data, - _ftype4scaled_finite, -) +from ..volumeutils import (array_from_file, + _is_compressed_fobj, + array_to_file, + allopen, # for backwards compatibility + fname_ext_ul_case, + calculate_scale, + can_cast, + write_zeros, + seek_tell, + apply_read_scaling, + working_type, + best_write_scale_ftype, + better_float_of, + int_scinter_ftype, + make_dt_codes, + native_code, + shape_zoom_affine, + rec2dict, + _dt_min_max, + _write_data, + _ftype4scaled_finite, + ) from ..openers import Opener, BZ2File -from ..casting import floor_log2, type_info, OK_FLOATS, shared_range +from ..casting import (floor_log2, type_info, OK_FLOATS, shared_range) -from numpy.testing import assert_array_almost_equal, assert_array_equal +from numpy.testing import (assert_array_almost_equal, + assert_array_equal) -from ..testing_pytest import ( - assert_dt_equal, - assert_allclose_safely, - suppress_warnings, - clear_and_catch_warnings, -) +from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, + suppress_warnings, clear_and_catch_warnings) #: convenience variables for numpy types -FLOAT_TYPES = np.sctypes["float"] -COMPLEX_TYPES = np.sctypes["complex"] +FLOAT_TYPES = np.sctypes['float'] +COMPLEX_TYPES = np.sctypes['complex'] CFLOAT_TYPES = FLOAT_TYPES + COMPLEX_TYPES -INT_TYPES = np.sctypes["int"] -IUINT_TYPES = INT_TYPES + np.sctypes["uint"] +INT_TYPES = np.sctypes['int'] +IUINT_TYPES = INT_TYPES + np.sctypes['uint'] NUMERIC_TYPES = CFLOAT_TYPES + IUINT_TYPES def test__is_compressed_fobj(): # _is_compressed helper function with InTemporaryDirectory(): - for ext, opener, compressed in ( - ("", open, False), - (".gz", gzip.open, True), - (".bz2", BZ2File, True), - ): - fname = "test.bin" + ext - for mode in ("wb", "rb"): + for ext, opener, compressed in (('', open, False), + ('.gz', gzip.open, True), + ('.bz2', BZ2File, True)): + fname = 'test.bin' + ext + for mode in ('wb', 'rb'): fobj = opener(fname, mode) assert _is_compressed_fobj(fobj) == compressed fobj.close() @@ -97,27 +91,29 @@ def make_array(n, bytes): return arr # Check whether file, gzip file, bz2 file reread memory from cache - fname = "test.bin" + fname = 'test.bin' with InTemporaryDirectory(): - for n, opener in itertools.product((256, 1024, 2560, 25600), (open, gzip.open, BZ2File)): + for n, opener in itertools.product( + (256, 1024, 2560, 25600), + (open, gzip.open, BZ2File)): in_arr = np.arange(n, dtype=dtype) # Write array to file - fobj_w = opener(fname, "wb") + fobj_w = opener(fname, 'wb') fobj_w.write(in_arr.tostring()) fobj_w.close() # Read back from file - fobj_r = opener(fname, "rb") + fobj_r = opener(fname, 'rb') try: contents1 = bytearray(4 * n) fobj_r.readinto(contents1) # Second element is 1 - assert contents1[0:8] != b"\x00" * 8 + assert contents1[0:8] != b'\x00' * 8 out_arr = make_array(n, contents1) assert_array_equal(in_arr, out_arr) # Set second element to 0 out_arr[1] = 0 # Show this changed the bytes string - assert contents1[:8] == b"\x00" * 8 + assert contents1[:8] == b'\x00' * 8 # Reread, to get unmodified contents fobj_r.seek(0) contents2 = bytearray(4 * n) @@ -140,11 +136,11 @@ def 
test_array_from_file(): offset = 10 assert buf_chk(in_arr, BytesIO(), None, offset) # check on real file - fname = "test.bin" + fname = 'test.bin' with InTemporaryDirectory(): # fortran ordered - out_buf = open(fname, "wb") - in_buf = open(fname, "rb") + out_buf = open(fname, 'wb') + in_buf = open(fname, 'rb') assert buf_chk(in_arr, out_buf, in_buf, offset) # Drop offset to check that shape's not coming from file length out_buf.seek(0) @@ -153,9 +149,9 @@ def test_array_from_file(): assert buf_chk(in_arr, out_buf, in_buf, offset) del out_buf, in_buf # Make sure empty shape, and zero length, give empty arrays - arr = array_from_file((), np.dtype("f8"), BytesIO()) + arr = array_from_file((), np.dtype('f8'), BytesIO()) assert len(arr) == 0 - arr = array_from_file((0,), np.dtype("f8"), BytesIO()) + arr = array_from_file((0,), np.dtype('f8'), BytesIO()) assert len(arr) == 0 # Check error from small file with pytest.raises(IOError): @@ -163,8 +159,8 @@ def test_array_from_file(): # check on real file fd, fname = tempfile.mkstemp() with InTemporaryDirectory(): - open(fname, "wb").write(b"1") - in_buf = open(fname, "rb") + open(fname, 'wb').write(b'1') + in_buf = open(fname, 'rb') # For windows this will raise a WindowsError from mmap, Unices # appear to raise an IOError with pytest.raises(Exception): @@ -178,51 +174,55 @@ def test_array_from_file_mmap(): with InTemporaryDirectory(): for dt in (np.int16, np.float): arr = np.arange(np.prod(shape), dtype=dt).reshape(shape) - with open("test.bin", "wb") as fobj: - fobj.write(arr.tostring(order="F")) - with open("test.bin", "rb") as fobj: + with open('test.bin', 'wb') as fobj: + fobj.write(arr.tostring(order='F')) + with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj) assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == "c" - with open("test.bin", "rb") as fobj: + assert res.mode == 'c' + with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap=True) assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == "c" - with open("test.bin", "rb") as fobj: - res = array_from_file(shape, dt, fobj, mmap="c") + assert res.mode == 'c' + with open('test.bin', 'rb') as fobj: + res = array_from_file(shape, dt, fobj, mmap='c') assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == "c" - with open("test.bin", "rb") as fobj: - res = array_from_file(shape, dt, fobj, mmap="r") + assert res.mode == 'c' + with open('test.bin', 'rb') as fobj: + res = array_from_file(shape, dt, fobj, mmap='r') assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == "r" - with open("test.bin", "rb+") as fobj: - res = array_from_file(shape, dt, fobj, mmap="r+") + assert res.mode == 'r' + with open('test.bin', 'rb+') as fobj: + res = array_from_file(shape, dt, fobj, mmap='r+') assert_array_equal(res, arr) assert isinstance(res, np.memmap) - assert res.mode == "r+" - with open("test.bin", "rb") as fobj: + assert res.mode == 'r+' + with open('test.bin', 'rb') as fobj: res = array_from_file(shape, dt, fobj, mmap=False) assert_array_equal(res, arr) assert not isinstance(res, np.memmap) - with open("test.bin", "rb") as fobj: + with open('test.bin', 'rb') as fobj: with pytest.raises(ValueError): - array_from_file(shape, dt, fobj, mmap="p") + array_from_file(shape, dt, fobj, mmap='p') def buf_chk(in_arr, out_buf, in_buf, offset): - """ Write contents of in_arr into fileobj, read back, check same """ - instr = b" " * offset + 
in_arr.tostring(order="F") + ''' Write contents of in_arr into fileobj, read back, check same ''' + instr = b' ' * offset + in_arr.tostring(order='F') out_buf.write(instr) out_buf.flush() if in_buf is None: # we're using in_buf from out_buf out_buf.seek(0) in_buf = out_buf - arr = array_from_file(in_arr.shape, in_arr.dtype, in_buf, offset) + arr = array_from_file( + in_arr.shape, + in_arr.dtype, + in_buf, + offset) return np.allclose(in_arr, arr) @@ -232,13 +232,14 @@ def test_array_from_file_openers(): dtype = np.dtype(np.float32) in_arr = np.arange(24, dtype=dtype).reshape(shape) with InTemporaryDirectory(): - for ext, offset in itertools.product(("", ".gz", ".bz2"), (0, 5, 10)): - fname = "test.bin" + ext - with Opener(fname, "wb") as out_buf: + for ext, offset in itertools.product(('', '.gz', '.bz2'), + (0, 5, 10)): + fname = 'test.bin' + ext + with Opener(fname, 'wb') as out_buf: if offset != 0: # avoid https://bugs.python.org/issue16828 - out_buf.write(b" " * offset) - out_buf.write(in_arr.tostring(order="F")) - with Opener(fname, "rb") as in_buf: + out_buf.write(b' ' * offset) + out_buf.write(in_arr.tostring(order='F')) + with Opener(fname, 'rb') as in_buf: out_arr = array_from_file(shape, dtype, in_buf, offset) assert_array_almost_equal(in_arr, out_arr) # Delete object holding onto file for Windows @@ -250,26 +251,25 @@ def test_array_from_file_reread(): # This is the live check for the generic checks in # test_fobj_string_assumptions offset = 9 - fname = "test.bin" + fname = 'test.bin' with InTemporaryDirectory(): for shape, opener, dtt, order in itertools.product( - ((64,), (64, 65), (64, 65, 66)), - (open, gzip.open, bz2.BZ2File, BytesIO), - (np.int16, np.float32), - ("F", "C"), - ): + ((64,), (64, 65), (64, 65, 66)), + (open, gzip.open, bz2.BZ2File, BytesIO), + (np.int16, np.float32), + ('F', 'C')): n_els = np.prod(shape) in_arr = np.arange(n_els, dtype=dtt).reshape(shape) - is_bio = hasattr(opener, "getvalue") + is_bio = hasattr(opener, 'getvalue') # Write array to file - fobj_w = opener() if is_bio else opener(fname, "wb") - fobj_w.write(b" " * offset) + fobj_w = opener() if is_bio else opener(fname, 'wb') + fobj_w.write(b' ' * offset) fobj_w.write(in_arr.tostring(order=order)) if is_bio: fobj_r = fobj_w else: fobj_w.close() - fobj_r = opener(fname, "rb") + fobj_r = opener(fname, 'rb') # Read back from file try: out_arr = array_from_file(shape, dtt, fobj_r, offset, order) @@ -291,12 +291,14 @@ def test_array_to_file(): str_io = BytesIO() for tp in (np.uint64, np.float, np.complex): dt = np.dtype(tp) - for code in "<>": + for code in '<>': ndt = dt.newbyteorder(code) for allow_intercept in (True, False): with suppress_warnings(): # deprecated - scale, intercept, mn, mx = calculate_scale(arr, ndt, allow_intercept) - data_back = write_return(arr, str_io, ndt, 0, intercept, scale) + scale, intercept, mn, mx = \ + calculate_scale(arr, ndt, allow_intercept) + data_back = write_return(arr, str_io, ndt, + 0, intercept, scale) assert_array_almost_equal(arr, data_back) # Test array-like str_io = BytesIO() @@ -322,14 +324,13 @@ def test_a2f_upscale(): # Test values discovered from stress testing. The largish value (2**115) # overflows to inf after the intercept is subtracted, using float32 as the # working precision. The difference between inf and this value is lost. 
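# A minimal sketch (assuming only numpy) of the precision loss described
# above: subtracting the float32 minimum (the intercept used below) from
# 2**115 overflows float32 to inf, whereas float64 keeps it finite.
#
#     import numpy as np
#     x = np.float32(2 ** 115)
#     mn = np.finfo(np.float32).min          # about -3.4e38
#     with np.errstate(over='ignore'):
#         print(x - mn)                      # inf (float32 overflow)
#     print(np.float64(x) - np.float64(mn))  # about 3.407e+38, finite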
- arr = np.array([[info["min"], 2 ** 115, info["max"]]], dtype=np.float32) - slope = np.float32(2 ** 121) - inter = info["min"] + arr = np.array([[info['min'], 2**115, info['max']]], dtype=np.float32) + slope = np.float32(2**121) + inter = info['min'] str_io = BytesIO() # We need to provide mn, mx for function to be able to calculate upcasting - array_to_file( - arr, str_io, np.uint8, intercept=inter, divslope=slope, mn=info["min"], mx=info["max"], - ) + array_to_file(arr, str_io, np.uint8, intercept=inter, divslope=slope, + mn=info['min'], mx=info['max']) raw = array_from_file(arr.shape, np.uint8, str_io) back = apply_read_scaling(raw, slope, inter) top = back - arr @@ -344,11 +345,11 @@ def test_a2f_min_max(): for out_dt in (np.float32, np.int8): arr = np.arange(4, dtype=in_dt) # min thresholding - with np.errstate(invalid="ignore"): + with np.errstate(invalid='ignore'): data_back = write_return(arr, str_io, out_dt, 0, 0, 1, 1) assert_array_equal(data_back, [1, 1, 2, 3]) # max thresholding - with np.errstate(invalid="ignore"): + with np.errstate(invalid='ignore'): data_back = write_return(arr, str_io, out_dt, 0, 0, 1, None, 2) assert_array_equal(data_back, [0, 1, 2, 2]) # min max thresholding @@ -373,13 +374,13 @@ def test_a2f_order(): arr = np.array([0.0, 1.0, 2.0]) str_io = BytesIO() # order makes no difference in 1D case - data_back = write_return(arr, str_io, ndt, order="C") + data_back = write_return(arr, str_io, ndt, order='C') assert_array_equal(data_back, [0.0, 1.0, 2.0]) # but does in the 2D case arr = np.array([[0.0, 1.0], [2.0, 3.0]]) - data_back = write_return(arr, str_io, ndt, order="F") + data_back = write_return(arr, str_io, ndt, order='F') assert_array_equal(data_back, arr) - data_back = write_return(arr, str_io, ndt, order="C") + data_back = write_return(arr, str_io, ndt, order='C') assert_array_equal(data_back, arr.T) @@ -393,12 +394,12 @@ def test_a2f_nan2zero(): # True is the default, but just to show it's possible data_back = write_return(arr, str_io, ndt, nan2zero=True) assert_array_equal(data_back, arr) - with np.errstate(invalid="ignore"): + with np.errstate(invalid='ignore'): data_back = write_return(arr, str_io, np.int64, nan2zero=True) assert_array_equal(data_back, [[0, 0], [0, 0]]) # otherwise things get a bit weird; tidied here # How weird? 
Look at arr.astype(np.int64) - with np.errstate(invalid="ignore"): + with np.errstate(invalid='ignore'): data_back = write_return(arr, str_io, np.int64, nan2zero=False) assert_array_equal(data_back, arr.astype(np.int64)) @@ -417,16 +418,18 @@ def test_a2f_nan2zero_scaling(): # Array values including zero before scaling but not after bio = BytesIO() for in_dt, out_dt, zero_in, inter in itertools.product( - FLOAT_TYPES, IUINT_TYPES, (True, False), (0, -100) - ): + FLOAT_TYPES, + IUINT_TYPES, + (True, False), + (0, -100)): in_info = np.finfo(in_dt) out_info = np.iinfo(out_dt) - mx = min(in_info.max, out_info.max * 2.0, 2 ** 32) + inter + mx = min(in_info.max, out_info.max * 2., 2**32) + inter mn = 0 if zero_in or inter else 100 vals = [np.nan] + [mn, mx] nan_arr = np.array(vals, dtype=in_dt) zero_arr = np.nan_to_num(nan_arr) - with np.errstate(invalid="ignore"): + with np.errstate(invalid='ignore'): back_nan = write_return(nan_arr, bio, np.int64, intercept=inter) back_zero = write_return(zero_arr, bio, np.int64, intercept=inter) assert_array_equal(back_nan, back_zero) @@ -436,7 +439,7 @@ def test_a2f_offset(): # check that non-zero file offset works arr = np.array([[0.0, 1.0], [2.0, 3.0]]) str_io = BytesIO() - str_io.write(b"a" * 42) + str_io.write(b'a' * 42) array_to_file(arr, str_io, np.float, 42) data_back = array_from_file(arr.shape, np.float, str_io, 42) assert_array_equal(data_back, arr.astype(np.float)) @@ -478,28 +481,22 @@ def test_a2f_zeros(): def test_a2f_big_scalers(): # Check that clip works even for overflowing scalers / data info = type_info(np.float32) - arr = np.array([info["min"], 0, info["max"]], dtype=np.float32) + arr = np.array([info['min'], 0, info['max']], dtype=np.float32) str_io = BytesIO() # Intercept causes overflow - does routine scale correctly? # We check whether the routine correctly clips extreme values. # We need nan2zero=False because we can't represent 0 in the input, given # the scaling and the output range. with suppress_warnings(): # overflow - array_to_file(arr, str_io, np.int8, intercept=np.float32(2 ** 120), nan2zero=False) + array_to_file(arr, str_io, np.int8, intercept=np.float32(2**120), + nan2zero=False) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, -128, 127]) # Scales also if mx, mn specified? Same notes and complaints as for the test # above. str_io.seek(0) - array_to_file( - arr, - str_io, - np.int8, - mn=info["min"], - mx=info["max"], - intercept=np.float32(2 ** 120), - nan2zero=False, - ) + array_to_file(arr, str_io, np.int8, mn=info['min'], mx=info['max'], + intercept=np.float32(2**120), nan2zero=False) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, -128, 127]) # And if slope causes overflow? @@ -510,7 +507,8 @@ def test_a2f_big_scalers(): assert_array_equal(data_back, [-128, 0, 127]) # with mn, mx specified? 
str_io.seek(0) - array_to_file(arr, str_io, np.int8, mn=info["min"], mx=info["max"], divslope=np.float32(0.5)) + array_to_file(arr, str_io, np.int8, mn=info['min'], mx=info['max'], + divslope=np.float32(0.5)) data_back = array_from_file(arr.shape, np.int8, str_io) assert_array_equal(data_back, [-128, 0, 127]) @@ -520,13 +518,13 @@ def test_a2f_int_scaling(): arr = np.array([0, 1, 128, 255], dtype=np.uint8) fobj = BytesIO() back_arr = write_return(arr, fobj, np.uint8, intercept=1) - assert_array_equal(back_arr, np.clip(arr - 1.0, 0, 255)) + assert_array_equal(back_arr, np.clip(arr - 1., 0, 255)) back_arr = write_return(arr, fobj, np.uint8, divslope=2) - assert_array_equal(back_arr, np.round(np.clip(arr / 2.0, 0, 255))) + assert_array_equal(back_arr, np.round(np.clip(arr / 2., 0, 255))) back_arr = write_return(arr, fobj, np.uint8, intercept=1, divslope=2) - assert_array_equal(back_arr, np.round(np.clip((arr - 1.0) / 2.0, 0, 255))) + assert_array_equal(back_arr, np.round(np.clip((arr - 1.) / 2., 0, 255))) back_arr = write_return(arr, fobj, np.int16, intercept=1, divslope=2) - assert_array_equal(back_arr, np.round((arr - 1.0) / 2.0)) + assert_array_equal(back_arr, np.round((arr - 1.) / 2.)) def test_a2f_scaled_unscaled(): @@ -534,8 +532,10 @@ def test_a2f_scaled_unscaled(): # without scaling fobj = BytesIO() for in_dtype, out_dtype, intercept, divslope in itertools.product( - NUMERIC_TYPES, NUMERIC_TYPES, (0, 0.5, -1, 1), (1, 0.5, 2) - ): + NUMERIC_TYPES, + NUMERIC_TYPES, + (0, 0.5, -1, 1), + (1, 0.5, 2)): mn_in, mx_in = _dt_min_max(in_dtype) nan_val = np.nan if in_dtype in CFLOAT_TYPES else 10 arr = np.array([mn_in, -1, 0, 1, mx_in, nan_val], dtype=in_dtype) @@ -545,28 +545,31 @@ def test_a2f_scaled_unscaled(): if out_dtype in IUINT_TYPES: nan_fill = np.round(nan_fill) # nan2zero will check whether 0 in scaled to a valid value in output - if in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out: + if (in_dtype in CFLOAT_TYPES and not mn_out <= nan_fill <= mx_out): with pytest.raises(ValueError): - array_to_file( - arr, fobj, out_dtype=out_dtype, divslope=divslope, intercept=intercept, - ) + array_to_file(arr, + fobj, + out_dtype=out_dtype, + divslope=divslope, + intercept=intercept) continue with suppress_warnings(): - back_arr = write_return( - arr, fobj, out_dtype=out_dtype, divslope=divslope, intercept=intercept - ) + back_arr = write_return(arr, fobj, + out_dtype=out_dtype, + divslope=divslope, + intercept=intercept) exp_back = arr.copy() - if ( - in_dtype in IUINT_TYPES - and out_dtype in IUINT_TYPES - and (intercept, divslope) == (0, 1) - ): + if (in_dtype in IUINT_TYPES and + out_dtype in IUINT_TYPES and + (intercept, divslope) == (0, 1)): # Direct iu to iu casting. # Need to clip if ranges not the same. # Use smaller of input, output range to avoid np.clip upcasting # the array because of large clip limits. 
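# A minimal sketch of the np.clip upcasting being avoided, under the
# value-based promotion rules of the NumPy this test targets (pre-NEP 50);
# treat the exact result dtypes as an assumption of that era:
#
#     import numpy as np
#     a = np.zeros(3, dtype=np.int8)
#     np.clip(a, -1, 1).dtype              # int8: limits fit the input dtype
#     np.clip(a, -2 ** 40, 2 ** 40).dtype  # int64: large limits force upcast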
if (mn_in, mx_in) != (mn_out, mx_out): - exp_back = np.clip(exp_back, max(mn_in, mn_out), min(mx_in, mx_out)) + exp_back = np.clip(exp_back, + max(mn_in, mn_out), + min(mx_in, mx_out)) else: # Need to deal with nans, casting to float, clipping if in_dtype in CFLOAT_TYPES and out_dtype in IUINT_TYPES: exp_back[np.isnan(exp_back)] = 0 @@ -576,7 +579,8 @@ def test_a2f_scaled_unscaled(): exp_back -= intercept if divslope != 1: exp_back /= divslope - if exp_back.dtype.type in CFLOAT_TYPES and out_dtype in IUINT_TYPES: + if (exp_back.dtype.type in CFLOAT_TYPES and + out_dtype in IUINT_TYPES): exp_back = np.round(exp_back).astype(float) exp_back = np.clip(exp_back, *shared_range(float, out_dtype)) exp_back = exp_back.astype(out_dtype) @@ -596,26 +600,35 @@ def test_a2f_nanpos(): def test_a2f_bad_scaling(): # Test that pathological scalers raise an error - NUMERICAL_TYPES = sum([np.sctypes[key] for key in ["int", "uint", "float", "complex"]], []) + NUMERICAL_TYPES = sum([np.sctypes[key] for key in ['int', + 'uint', + 'float', + 'complex']], + []) for in_type, out_type, slope, inter in itertools.product( - NUMERICAL_TYPES, - NUMERICAL_TYPES, - (None, 1, 0, np.nan, -np.inf, np.inf), - (0, np.nan, -np.inf, np.inf), - ): + NUMERICAL_TYPES, + NUMERICAL_TYPES, + (None, 1, 0, np.nan, -np.inf, np.inf), + (0, np.nan, -np.inf, np.inf)): arr = np.ones((2,), dtype=in_type) fobj = BytesIO() if (slope, inter) == (1, 0): - assert_array_equal( - arr, write_return(arr, fobj, out_type, intercept=inter, divslope=slope) - ) + assert_array_equal(arr, + write_return(arr, fobj, out_type, + intercept=inter, + divslope=slope)) elif (slope, inter) == (None, 0): - assert_array_equal( - 0, write_return(arr, fobj, out_type, intercept=inter, divslope=slope) - ) + assert_array_equal(0, + write_return(arr, fobj, out_type, + intercept=inter, + divslope=slope)) else: with pytest.raises(ValueError): - array_to_file(arr, fobj, np.int8, intercept=inter, divslope=slope) + array_to_file(arr, + fobj, + np.int8, + intercept=inter, + divslope=slope) def test_a2f_nan2zero_range(): @@ -635,9 +648,8 @@ def test_a2f_nan2zero_range(): # Pushing zero outside the output data range does not generate error back_arr = write_return(arr_no_nan, fobj, np.int8, intercept=129, nan2zero=True) assert_array_equal([-128, -128, -128, -127], back_arr) - back_arr = write_return( - arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2, nan2zero=True - ) + back_arr = write_return(arr_no_nan, fobj, np.int8, + intercept=257.1, divslope=2, nan2zero=True) assert_array_equal([-128, -128, -128, -128], back_arr) for dt in CFLOAT_TYPES: arr = np.array([-1, 0, 1, np.nan], dtype=dt) @@ -645,9 +657,11 @@ def test_a2f_nan2zero_range(): arr_no_nan = np.array([-1, 0, 1, 2], dtype=dt) # No errors from explicit thresholding # mn thresholding excluding zero - assert_array_equal([1, 1, 1, 0], write_return(arr, fobj, np.int8, mn=1)) + assert_array_equal([1, 1, 1, 0], + write_return(arr, fobj, np.int8, mn=1)) # mx thresholding excluding zero - assert_array_equal([-1, -1, -1, 0], write_return(arr, fobj, np.int8, mx=-1)) + assert_array_equal([-1, -1, -1, 0], + write_return(arr, fobj, np.int8, mx=-1)) # Errors from datatype threshold after scaling back_arr = write_return(arr, fobj, np.int8, intercept=128) assert_array_equal([-128, -128, -127, -128], back_arr) @@ -667,16 +681,17 @@ def test_a2f_nan2zero_range(): with pytest.raises(ValueError): write_return(arr_no_nan, fobj, np.int8, intercept=257.1, divslope=2) # OK with nan2zero false - back_arr = write_return(arr, fobj, np.int8, 
intercept=257.1, divslope=2, nan2zero=False) + back_arr = write_return(arr, fobj, np.int8, + intercept=257.1, divslope=2, nan2zero=False) assert_array_equal([-128, -128, -128, nan_cast], back_arr) def test_a2f_non_numeric(): # Reminder that we may get structured dtypes - dt = np.dtype([("f1", "f"), ("f2", "i2")]) + dt = np.dtype([('f1', 'f'), ('f2', 'i2')]) arr = np.zeros((2,), dtype=dt) - arr["f1"] = 0.4, 0.6 - arr["f2"] = 10, 12 + arr['f1'] = 0.4, 0.6 + arr['f2'] = 10, 12 fobj = BytesIO() back_arr = write_return(arr, fobj, dt) assert_array_equal(back_arr, arr) @@ -722,12 +737,12 @@ def test_apply_scaling(): ret = apply_read_scaling(np.float32(0), inter=np.float64(2)) assert ret.dtype == np.float64 # Check integer inf upcast - big = f32(type_info(f32)["max"]) + big = f32(type_info(f32)['max']) # Normally this would not upcast assert (i16_arr * big).dtype == np.float32 # An equivalent case is a little hard to find for the intercept - nmant_32 = type_info(np.float32)["nmant"] - big_delta = np.float32(2 ** (floor_log2(big) - nmant_32)) + nmant_32 = type_info(np.float32)['nmant'] + big_delta = np.float32(2**(floor_log2(big) - nmant_32)) assert (i16_arr * big_delta + big).dtype == np.float32 # Upcasting does occur with this routine assert apply_read_scaling(i16_arr, big).dtype == np.float64 @@ -741,8 +756,10 @@ def test_apply_scaling(): assert apply_read_scaling(np.int8(0), f32(-1e38), f32(0.0)).dtype == np.float64 # Non-zero intercept still generates floats assert_dt_equal(apply_read_scaling(i16_arr, 1.0, 1.0).dtype, float) - assert_dt_equal(apply_read_scaling(np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float) - assert_dt_equal(apply_read_scaling(np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float) + assert_dt_equal(apply_read_scaling( + np.zeros((1,), dtype=np.int32), 1.0, 1.0).dtype, float) + assert_dt_equal(apply_read_scaling( + np.zeros((1,), dtype=np.int64), 1.0, 1.0).dtype, float) def test_apply_read_scaling_ints(): @@ -755,7 +772,7 @@ def test_apply_read_scaling_ints(): def test_apply_read_scaling_nones(): # Check that we can pass None as slope and inter to apply read scaling - arr = np.arange(10, dtype=np.int16) + arr=np.arange(10, dtype=np.int16) assert_array_equal(apply_read_scaling(arr, None, None), arr) assert_array_equal(apply_read_scaling(arr, 2, None), arr * 2) assert_array_equal(apply_read_scaling(arr, None, 1), arr + 1) @@ -775,8 +792,7 @@ def test_working_type(): # need this because of the very confusing np.int32 != np.intp (on 32 bit). 
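# A short illustration of the wrinkle noted above: dtype strings are
# compared because scalar types need not match even when sizes do
# (np.intp mirrors the platform pointer size):
#
#     import numpy as np
#     np.dtype(np.int32).str  # '<i4' on little-endian builds
#     np.dtype(np.intp).str   # '<i4' on 32-bit platforms, '<i8' on 64-bit
#     # hence wt() normalizes everything through np.dtype(...).str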
def wt(*args, **kwargs): return np.dtype(working_type(*args, **kwargs)).str - - d1 = np.atleast_1d + d1=np.atleast_1d for in_type in NUMERIC_TYPES: in_ts = np.dtype(in_type).str assert wt(in_type) == in_ts @@ -796,31 +812,30 @@ def wt(*args, **kwargs): out_val = in_val - d1(i_val) assert wt(in_type, 1, i_val) == out_val.dtype.str # Combine scaling and intercept - out_val = (in_val - d1(i_val)) / d1(sl_val) + out_val=(in_val - d1(i_val)) / d1(sl_val) assert wt(in_type, sl_val, i_val) == out_val.dtype.str # Confirm that type codes and dtypes work as well f32s = np.dtype(np.float32).str - assert wt("f4", 1, 0) == f32s - assert wt(np.dtype("f4"), 1, 0) == f32s + assert wt('f4', 1, 0) == f32s + assert wt(np.dtype('f4'), 1, 0) == f32s def test_better_float(): # Better float function def check_against(f1, f2): return f1 if FLOAT_TYPES.index(f1) >= FLOAT_TYPES.index(f2) else f2 - for first in FLOAT_TYPES: - for other in IUINT_TYPES + np.sctypes["complex"]: + for other in IUINT_TYPES + np.sctypes['complex']: assert better_float_of(first, other) == first assert better_float_of(other, first) == first - for other2 in IUINT_TYPES + np.sctypes["complex"]: + for other2 in IUINT_TYPES + np.sctypes['complex']: assert better_float_of(other, other2) == np.float32 assert better_float_of(other, other2, np.float64) == np.float64 for second in FLOAT_TYPES: assert better_float_of(first, second) == check_against(first, second) # Check codes and dtypes work - assert better_float_of("f4", "f8", "f4") == np.float64 - assert better_float_of("i4", "i8", "f8") == np.float64 + assert better_float_of('f4', 'f8', 'f4') == np.float64 + assert better_float_of('i4', 'i8', 'f8') == np.float64 def test_best_write_scale_ftype(): @@ -834,15 +849,15 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of(dtt, np.float32) assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of(dtt, np.float32) # Overflowing ints with scaling results in upcast - best_vals = ((np.float32, np.float64),) + best_vals=((np.float32, np.float64),) if np.longdouble in OK_FLOATS: best_vals += ((np.float64, np.longdouble),) for lower_t, higher_t in best_vals: # Information on this float L_info = type_info(lower_t) - t_max = L_info["max"] - nmant = L_info["nmant"] # number of significand digits - big_delta = lower_t(2 ** (floor_log2(t_max) - nmant)) # delta below max + t_max = L_info['max'] + nmant = L_info['nmant'] # number of significand digits + big_delta = lower_t(2**(floor_log2(t_max) - nmant)) # delta below max # Even large values that don't overflow don't change output arr = np.array([0, t_max], dtype=lower_t) assert best_write_scale_ftype(arr, 1, 0) == lower_t @@ -854,29 +869,28 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, 1, -big_delta / 2.01) == lower_t assert best_write_scale_ftype(arr, 1, -big_delta / 2.0) == higher_t # With infs already in input, default type returns - arr[0] = np.inf + arr[0]=np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t - arr[0] = -np.inf + arr[0]=-np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t def test_can_cast(): - tests = ( - (np.float32, np.float32, True, True, True), - (np.float64, np.float32, True, True, True), - (np.complex128, np.float32, False, False, False), - (np.float32, np.complex128, True, True, True), - (np.float32, np.uint8, False, True, True), - (np.uint32, np.complex128, True, True, True), - (np.int64, np.float32, True, True, True), - (np.complex128, np.int16, False, 
False, False), - (np.float32, np.int16, False, True, True), - (np.uint8, np.int16, True, True, True), - (np.uint16, np.int16, False, True, True), - (np.int16, np.uint16, False, False, True), - (np.int8, np.uint16, False, False, True), - (np.uint16, np.uint8, False, True, True), - ) + tests=((np.float32, np.float32, True, True, True), + (np.float64, np.float32, True, True, True), + (np.complex128, np.float32, False, False, False), + (np.float32, np.complex128, True, True, True), + (np.float32, np.uint8, False, True, True), + (np.uint32, np.complex128, True, True, True), + (np.int64, np.float32, True, True, True), + (np.complex128, np.int16, False, False, False), + (np.float32, np.int16, False, True, True), + (np.uint8, np.int16, True, True, True), + (np.uint16, np.int16, False, True, True), + (np.int16, np.uint16, False, False, True), + (np.int8, np.uint16, False, False, True), + (np.uint16, np.uint8, False, True, True), + ) for intype, outtype, def_res, scale_res, all_res in tests: assert def_res == can_cast(intype, outtype) assert scale_res == can_cast(intype, outtype, False, True) @@ -886,21 +900,21 @@ def test_can_cast(): def test_write_zeros(): bio = BytesIO() write_zeros(bio, 10000) - assert bio.getvalue() == b"\x00" * 10000 + assert bio.getvalue() == b'\x00' * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 10000, 256) - assert bio.getvalue() == b"\x00" * 10000 + assert bio.getvalue() == b'\x00' * 10000 bio.seek(0) bio.truncate(0) write_zeros(bio, 200, 256) - assert bio.getvalue() == b"\x00" * 200 + assert bio.getvalue() == b'\x00' * 200 def test_seek_tell(): # Test seek tell routine bio = BytesIO() - in_files = bio, "test.bin", "test.gz", "test.bz2" + in_files = bio, 'test.bin', 'test.gz', 'test.bz2' start = 10 end = 100 diff = end - start @@ -910,28 +924,28 @@ def test_seek_tell(): st = functools.partial(seek_tell, write0=write0) bio.seek(0) # First write the file - with ImageOpener(in_file, "wb") as fobj: + with ImageOpener(in_file, 'wb') as fobj: assert fobj.tell() == 0 # already at position - OK st(fobj, 0) assert fobj.tell() == 0 # Move position by writing - fobj.write(b"\x01" * start) + fobj.write(b'\x01' * start) assert fobj.tell() == start # Files other than BZ2Files can seek forward on write, leaving # zeros in their wake. 
BZ2Files can't seek when writing, unless # we enable the write0 flag to seek_tell - if not write0 and in_file == "test.bz2": # Can't seek write in bz2 + if not write0 and in_file == 'test.bz2': # Can't seek write in bz2 # write the zeros by hand for the read test below - fobj.write(b"\x00" * diff) + fobj.write(b'\x00' * diff) else: st(fobj, end) assert fobj.tell() == end # Write tail - fobj.write(b"\x02" * tail) + fobj.write(b'\x02' * tail) bio.seek(0) # Now read back the file testing seek_tell in reading mode - with ImageOpener(in_file, "rb") as fobj: + with ImageOpener(in_file, 'rb') as fobj: assert fobj.tell() == 0 st(fobj, 0) assert fobj.tell() == 0 @@ -943,23 +957,23 @@ def test_seek_tell(): st(fobj, 0) bio.seek(0) # Check we have the expected written output - with ImageOpener(in_file, "rb") as fobj: - assert fobj.read() == b"\x01" * start + b"\x00" * diff + b"\x02" * tail - for in_file in ("test2.gz", "test2.bz2"): + with ImageOpener(in_file, 'rb') as fobj: + assert fobj.read() == b'\x01' * start + b'\x00' * diff + b'\x02' * tail + for in_file in ('test2.gz', 'test2.bz2'): # Check failure of write seek backwards - with ImageOpener(in_file, "wb") as fobj: - fobj.write(b"g" * 10) + with ImageOpener(in_file, 'wb') as fobj: + fobj.write(b'g' * 10) assert fobj.tell() == 10 seek_tell(fobj, 10) assert fobj.tell() == 10 with pytest.raises(IOError): seek_tell(fobj, 5) # Make sure read seeks don't affect file - with ImageOpener(in_file, "rb") as fobj: + with ImageOpener(in_file, 'rb') as fobj: seek_tell(fobj, 10) seek_tell(fobj, 0) - with ImageOpener(in_file, "rb") as fobj: - assert fobj.read() == b"g" * 10 + with ImageOpener(in_file, 'rb') as fobj: + assert fobj.read() == b'g' * 10 def test_seek_tell_logic(): @@ -970,15 +984,15 @@ def test_seek_tell_logic(): assert bio.tell() == 10 class BabyBio(BytesIO): + def seek(self, *args): raise IOError() - bio = BabyBio() # Fresh fileobj, position 0, can't seek - error with pytest.raises(IOError): bio.seek(10) # Put fileobj in correct position by writing - ZEROB = b"\x00" + ZEROB = b'\x00' bio.write(ZEROB * 10) seek_tell(bio, 10) # already there, nothing to do assert bio.tell() == 10 @@ -993,108 +1007,110 @@ def seek(self, *args): def test_fname_ext_ul_case(): # Get filename ignoring the case of the filename extension with InTemporaryDirectory(): - with open("afile.TXT", "wt") as fobj: - fobj.write("Interesting information") + with open('afile.TXT', 'wt') as fobj: + fobj.write('Interesting information') # OSX usually has case-insensitive file systems; Windows also - os_cares_case = not exists("afile.txt") - with open("bfile.txt", "wt") as fobj: - fobj.write("More interesting information") + os_cares_case = not exists('afile.txt') + with open('bfile.txt', 'wt') as fobj: + fobj.write('More interesting information') # If there is no file, the case doesn't change - assert fname_ext_ul_case("nofile.txt") == "nofile.txt" - assert fname_ext_ul_case("nofile.TXT") == "nofile.TXT" + assert fname_ext_ul_case('nofile.txt') == 'nofile.txt' + assert fname_ext_ul_case('nofile.TXT') == 'nofile.TXT' # If there is a file, accept upper or lower case for ext if os_cares_case: - assert fname_ext_ul_case("afile.txt") == "afile.TXT" - assert fname_ext_ul_case("bfile.TXT") == "bfile.txt" + assert fname_ext_ul_case('afile.txt') == 'afile.TXT' + assert fname_ext_ul_case('bfile.TXT') == 'bfile.txt' else: - assert fname_ext_ul_case("afile.txt") == "afile.txt" - assert fname_ext_ul_case("bfile.TXT") == "bfile.TXT" - assert fname_ext_ul_case("afile.TXT") == "afile.TXT" - assert 
fname_ext_ul_case("bfile.txt") == "bfile.txt" + assert fname_ext_ul_case('afile.txt') == 'afile.txt' + assert fname_ext_ul_case('bfile.TXT') == 'bfile.TXT' + assert fname_ext_ul_case('afile.TXT') == 'afile.TXT' + assert fname_ext_ul_case('bfile.txt') == 'bfile.txt' # Not mixed case though - assert fname_ext_ul_case("afile.TxT") == "afile.TxT" + assert fname_ext_ul_case('afile.TxT') == 'afile.TxT' def test_allopen(): # This import into volumeutils is for compatibility. The code is the # ``openers`` module. with clear_and_catch_warnings() as w: - warnings.filterwarnings("once", category=DeprecationWarning) + warnings.filterwarnings('once', category=DeprecationWarning) # Test default mode is 'rb' fobj = allopen(__file__) # Check we got the deprecation warning assert len(w) == 1 - assert fobj.mode == "rb" + assert fobj.mode == 'rb' # That we can set it - fobj = allopen(__file__, "r") - assert fobj.mode == "r" + fobj = allopen(__file__, 'r') + assert fobj.mode == 'r' # with keyword arguments - fobj = allopen(__file__, mode="r") - assert fobj.mode == "r" + fobj = allopen(__file__, mode='r') + assert fobj.mode == 'r' # fileobj returns fileobj - msg = b"tiddle pom" + msg = b'tiddle pom' sobj = BytesIO(msg) fobj = allopen(sobj) assert fobj.read() == msg # mode is gently ignored - fobj = allopen(sobj, mode="r") + fobj = allopen(sobj, mode='r') def test_allopen_compresslevel(): # We can set the default compression level with the module global # Get some data to compress - with open(__file__, "rb") as fobj: - my_self = fobj.read() + with open(__file__, 'rb') as fobj: + my_self=fobj.read() # Prepare loop - fname = "test.gz" - sizes = {} + fname='test.gz' + sizes={} # Stash module global from .. import volumeutils as vu - original_compress_level = vu.default_compresslevel assert original_compress_level == 1 try: with InTemporaryDirectory(): - for compresslevel in ("default", 1, 9): - if compresslevel != "default": + for compresslevel in ('default', 1, 9): + if compresslevel != 'default': vu.default_compresslevel = compresslevel with warnings.catch_warnings(): warnings.simplefilter("ignore") - with allopen(fname, "wb") as fobj: + with allopen(fname, 'wb') as fobj: fobj.write(my_self) - with open(fname, "rb") as fobj: + with open(fname, 'rb') as fobj: my_selves_smaller = fobj.read() sizes[compresslevel] = len(my_selves_smaller) - assert sizes["default"] == sizes[1] + assert sizes['default'] == sizes[1] assert sizes[1] > sizes[9] finally: vu.default_compresslevel = original_compress_level def test_shape_zoom_affine(): - shape = (3, 5, 7) - zooms = (3, 2, 1) + shape=(3, 5, 7) + zooms=(3, 2, 1) res = shape_zoom_affine(shape, zooms) - exp = np.array( - [[-3.0, 0.0, 0.0, 3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -3.0], [0.0, 0.0, 0.0, 1.0],] - ) + exp = np.array([[-3., 0., 0., 3.], + [0., 2., 0., -4.], + [0., 0., 1., -3.], + [0., 0., 0., 1.]]) assert_array_almost_equal(res, exp) res = shape_zoom_affine((3, 5), (3, 2)) - exp = np.array( - [[-3.0, 0.0, 0.0, 3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -0.0], [0.0, 0.0, 0.0, 1.0],] - ) + exp = np.array([[-3., 0., 0., 3.], + [0., 2., 0., -4.], + [0., 0., 1., -0.], + [0., 0., 0., 1.]]) assert_array_almost_equal(res, exp) res = shape_zoom_affine(shape, zooms, False) - exp = np.array( - [[3.0, 0.0, 0.0, -3.0], [0.0, 2.0, 0.0, -4.0], [0.0, 0.0, 1.0, -3.0], [0.0, 0.0, 0.0, 1.0],] - ) + exp = np.array([[3., 0., 0., -3.], + [0., 2., 0., -4.], + [0., 0., 1., -3.], + [0., 0., 0., 1.]]) assert_array_almost_equal(res, exp) def test_rec2dict(): - r = np.zeros((), 
dtype=[("x", "i4"), ("s", "S10")]) + r = np.zeros((), dtype=[('x', 'i4'), ('s', 'S10')]) d = rec2dict(r) - assert d == {"x": 0, "s": b""} + assert d == {'x': 0, 's': b''} def test_dtypes(): @@ -1105,36 +1121,38 @@ def test_dtypes(): # In [10]: dtype(' Date: Mon, 11 Nov 2019 16:35:45 -0500 Subject: [PATCH 375/689] fixed spacing, take 27 --- nibabel/tests/test_volumeutils.py | 38 +++++++++++++++---------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 19e5f3bd27..7cd0920f6c 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -792,7 +792,7 @@ def test_working_type(): # need this because of the very confusing np.int32 != np.intp (on 32 bit). def wt(*args, **kwargs): return np.dtype(working_type(*args, **kwargs)).str - d1=np.atleast_1d + d1 = np.atleast_1d for in_type in NUMERIC_TYPES: in_ts = np.dtype(in_type).str assert wt(in_type) == in_ts @@ -812,7 +812,7 @@ def wt(*args, **kwargs): out_val = in_val - d1(i_val) assert wt(in_type, 1, i_val) == out_val.dtype.str # Combine scaling and intercept - out_val=(in_val - d1(i_val)) / d1(sl_val) + out_val = (in_val - d1(i_val)) / d1(sl_val) assert wt(in_type, sl_val, i_val) == out_val.dtype.str # Confirm that type codes and dtypes work as well f32s = np.dtype(np.float32).str @@ -849,7 +849,7 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, np.float32(2), 0) == better_float_of(dtt, np.float32) assert best_write_scale_ftype(arr, 1, np.float32(1)) == better_float_of(dtt, np.float32) # Overflowing ints with scaling results in upcast - best_vals=((np.float32, np.float64),) + best_vals = ((np.float32, np.float64),) if np.longdouble in OK_FLOATS: best_vals += ((np.float64, np.longdouble),) for lower_t, higher_t in best_vals: @@ -869,14 +869,14 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, 1, -big_delta / 2.01) == lower_t assert best_write_scale_ftype(arr, 1, -big_delta / 2.0) == higher_t # With infs already in input, default type returns - arr[0]=np.inf + arr[0] = np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t - arr[0]=-np.inf + arr[0] = -np.inf assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t def test_can_cast(): - tests=((np.float32, np.float32, True, True, True), + tests = ((np.float32, np.float32, True, True, True), (np.float64, np.float32, True, True, True), (np.complex128, np.float32, False, False, False), (np.float32, np.complex128, True, True, True), @@ -1060,8 +1060,8 @@ def test_allopen_compresslevel(): with open(__file__, 'rb') as fobj: my_self=fobj.read() # Prepare loop - fname='test.gz' - sizes={} + fname = 'test.gz' + sizes = {} # Stash module global from .. 
import volumeutils as vu original_compress_level = vu.default_compresslevel @@ -1085,8 +1085,8 @@ def test_allopen_compresslevel(): def test_shape_zoom_affine(): - shape=(3, 5, 7) - zooms=(3, 2, 1) + shape = (3, 5, 7) + zooms = (3, 2, 1) res = shape_zoom_affine(shape, zooms) exp = np.array([[-3., 0., 0., 3.], [0., 2., 0., -4.], @@ -1121,7 +1121,7 @@ def test_dtypes(): # In [10]: dtype(' Date: Mon, 11 Nov 2019 16:36:52 -0500 Subject: [PATCH 376/689] fixed spacing, take 28 --- nibabel/tests/test_volumeutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 7cd0920f6c..4072f85131 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -1058,7 +1058,7 @@ def test_allopen_compresslevel(): # We can set the default compression level with the module global # Get some data to compress with open(__file__, 'rb') as fobj: - my_self=fobj.read() + my_self = fobj.read() # Prepare loop fname = 'test.gz' sizes = {} From b37af09be802501363c7d6ecc5b783e7fca999cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= Date: Tue, 12 Nov 2019 19:59:28 -0500 Subject: [PATCH 377/689] Addressed @effigies's comments. --- nibabel/streamlines/array_sequence.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 6ea869bda7..279762932c 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -5,8 +5,7 @@ import numpy as np -from nibabel.deprecated import deprecate_with_version - +from ..deprecated import deprecate_with_version MEGABYTE = 1024 * 1024 From 3b6d7fe1b5134cee762eafe70be0985812e09a3d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 13 Nov 2019 09:16:19 -0500 Subject: [PATCH 378/689] RF: Avoid unneeded reference --- nibabel/arrayproxy.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 3710bc6832..eda456c4b7 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -362,11 +362,8 @@ def _get_scaled(self, dtype, slicer): use_dtype = scl_slope.dtype if dtype is None else dtype slope = scl_slope.astype(use_dtype) inter = scl_inter.astype(use_dtype) - # Read array - raw_data = self._get_unscaled(slicer=slicer) - # Upcast as necessary for big slopes, intercepts - scaled = apply_read_scaling(raw_data, slope, inter) - del raw_data + # Read array and upcast as necessary for big slopes, intercepts + scaled = apply_read_scaling(self._get_unscaled(slicer=slicer), slope, inter) if dtype is not None: scaled = scaled.astype(np.promote_types(scaled.dtype, dtype), copy=False) return scaled From d21a9809a53c184275766fd0b01a2a301f2c885f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Alexandre=20C=C3=B4t=C3=A9?= Date: Wed, 13 Nov 2019 10:37:17 -0500 Subject: [PATCH 379/689] ENH: speed up TRK loading by setting a suitable buffer size. 
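A minimal sketch of the intended effect, assuming only the interface in
the diff below (the generator and the 100 MB figure are illustrative):
sizing the points buffer near the expected data volume avoids repeated
reallocation while streamlines are appended one at a time.

    import numpy as np
    from nibabel.streamlines.array_sequence import (
        create_arraysequences_from_generator)

    def fake_reader():
        # Yield (points, scalars, properties) triples, as the TRK reader does
        for _ in range(1000):
            yield (np.zeros((50, 3), np.float32),  # streamline points
                   np.zeros((0, 0), np.float32),   # no scalars
                   np.zeros((0,), np.float32))     # no properties

    # One large buffer (in MB) for points; keep the 4 MB default elsewhere
    points, scalars, props = create_arraysequences_from_generator(
        fake_reader(), n=3, buffer_sizes=[100, 4, 4])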
--- nibabel/streamlines/array_sequence.py | 9 +++++++-- nibabel/streamlines/trk.py | 17 ++++++++++++++++- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/nibabel/streamlines/array_sequence.py b/nibabel/streamlines/array_sequence.py index 279762932c..71b4bcb3be 100644 --- a/nibabel/streamlines/array_sequence.py +++ b/nibabel/streamlines/array_sequence.py @@ -547,7 +547,7 @@ def load(cls, filename): return seq -def create_arraysequences_from_generator(gen, n): +def create_arraysequences_from_generator(gen, n, buffer_sizes=None): """ Creates :class:`ArraySequence` objects from a generator yielding tuples Parameters ---------- gen : generator Generator yielding a size `n` tuple containing the values to put in the array sequences. n : int Number of :class:`ArraySequence` objects to create. + buffer_sizes : list of float, optional + Sizes (in Mb) for each ArraySequence's buffer. """ - seqs = [ArraySequence() for _ in range(n)] + if buffer_sizes is None: + buffer_sizes = [4] * n + + seqs = [ArraySequence(buffer_size=size) for size in buffer_sizes] for data in gen: for i, seq in enumerate(seqs): if data[i].nbytes > 0: diff --git a/nibabel/streamlines/trk.py b/nibabel/streamlines/trk.py index f67ab1509a..2397a3ff24 100644 --- a/nibabel/streamlines/trk.py +++ b/nibabel/streamlines/trk.py @@ -372,8 +372,23 @@ def _read(): tractogram = LazyTractogram.from_data_func(_read) else: + + # Speed up loading by guessing a suitable buffer size. + with Opener(fileobj) as f: + old_file_position = f.tell() + f.seek(0, os.SEEK_END) + size = f.tell() + f.seek(old_file_position, os.SEEK_SET) + + # Buffer size is in megabytes. + mbytes = size // (1024 * 1024) + sizes = [mbytes, 4, 4] + if hdr["nb_scalars_per_point"] > 0: + sizes = [mbytes // 2, mbytes // 2, 4] + trk_reader = cls._read(fileobj, hdr) - arr_seqs = create_arraysequences_from_generator(trk_reader, n=3) + arr_seqs = create_arraysequences_from_generator(trk_reader, n=3, + buffer_sizes=sizes) streamlines, scalars, properties = arr_seqs properties = np.asarray(properties) # Actually a 2d array. tractogram = Tractogram(streamlines) From 8be3a0eedcf17030714d3a8d611e0826a92745ec Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 13 Nov 2019 14:07:39 -0500 Subject: [PATCH 380/689] DOC: Update docstrings, comments, for clarity --- nibabel/ecat.py | 19 +++++++++++++++++++ nibabel/minc1.py | 19 +++++++++++++++++++ nibabel/parrec.py | 24 ++++++++++++++++++++++++ nibabel/tests/test_image_api.py | 3 +++ 4 files changed, 65 insertions(+) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index cc3345292e..a0923f0753 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -705,6 +705,25 @@ def __array__(self): return data def get_scaled(self, dtype=None): + """ Read data from file and apply scaling + + The dtype of the returned array is the narrowest dtype that can + represent the data without overflow, and is at least as wide as + the dtype parameter. + + If dtype is unspecified, it is automatically determined. + + Parameters + ---------- + dtype : numpy dtype specifier + A numpy dtype specifier specifying the narrowest acceptable + dtype. + + Returns + ------- + array + Scaled image data of data type `dtype`.
+ """ data = self.__array__() if dtype is None: return data diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 45f2538415..c41af5c454 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -269,6 +269,25 @@ def _get_scaled(self, dtype, slicer): return data.astype(final_type, copy=False) def get_scaled(self, dtype=None): + """ Read data from file and apply scaling + + The dtype of the returned array is the narrowest dtype that can + represent the data without overflow, and is at least as wide as + the dtype parameter. + + If dtype is unspecified, it is automatically determined. + + Parameters + ---------- + dtype : numpy dtype specifier + A numpy dtype specifier specifying the narrowest acceptable + dtype. + + Returns + ------- + array + Scaled image data of data type `dtype`. + """ return self._get_scaled(dtype=dtype, slicer=()) def __array__(self): diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 1f6193db13..59eac79809 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -670,9 +670,33 @@ def _get_scaled(self, dtype, slicer): def get_unscaled(self): + """ Read data from file + + This is an optional part of the proxy API + """ return self._get_unscaled(slicer=()) def get_scaled(self, dtype=None): + """ Read data from file and apply scaling + + The dtype of the returned array is the narrowest dtype that can + represent the data without overflow, and is at least as wide as + the dtype parameter. + + If dtype is unspecified, it is the wider of the dtypes of the slopes + or intercepts. + + Parameters + ---------- + dtype : numpy dtype specifier + A numpy dtype specifier specifying the narrowest acceptable + dtype. + + Returns + ------- + array + Scaled image data of data type `dtype`. + """ return self._get_scaled(dtype=dtype, slicer=()) def __array__(self): diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f571311a7e..10b61628c7 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -314,6 +314,9 @@ def _check_proxy_interface(self, imaker, meth_name): # New data dtype, no caching, doesn't use or alter cache fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4') # We get back the original read, not the modified cache + # Allow for small rounding error when the data is scaled with 32-bit + # factors, rather than 64-bit factors and then cast to float32 + # Use rtol/atol from numpy.allclose assert_allclose(fdata_new_dt, proxy_data.astype('f4'), rtol=1e-05, atol=1e-08) assert_equal(fdata_new_dt.dtype, np.float32) # The original cache stays in place, for default float64 From f81584afe4cf2d980e25eec6f2d5c0c19a5510e0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:20:38 -0500 Subject: [PATCH 381/689] MNT: Update mailmap --- .mailmap | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.mailmap b/.mailmap index ea15651170..9eaa0d1434 100644 --- a/.mailmap +++ b/.mailmap @@ -14,6 +14,7 @@ Basile Pinsard bpinsard Basile Pinsard bpinsard Ben Cipollini Ben Cipollini Bertrand Thirion bthirion +Cameron Riddell <31414128+CRiddler@users.noreply.github.com> Christian Haselgrove Christian Haselgrove Christopher J. Markiewicz Chris Johnson Christopher J. Markiewicz Chris Markiewicz @@ -40,6 +41,8 @@ Kesshi Jordan kesshijordan Kevin S. Hahn Kevin S. Hahn Konstantinos Raktivan constracti Krish Subramaniam Krish Subramaniam +Krzysztof J. Gorgolewski +Krzysztof J.
Gorgolewski Marc-Alexandre Côté Marc-Alexandre Cote Mathias Goncalves mathiasg Matthew Cieslak Matt Cieslak @@ -48,6 +51,7 @@ Michael Hanke Michiel Cottaar Michiel Cottaar Ly Nguyen lxn2 Oliver P. Hinds ohinds +Oscar Esteban Paul McCarthy Paul McCarthy Satrajit Ghosh Satrajit Ghosh Serge Koudoro skoudoro From d4722a33af80e6129afbeff0a3188bf7908005c6 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:22:12 -0500 Subject: [PATCH 382/689] MNT: Update Zenodo --- .zenodo.json | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 9b4621464f..123f5d1bcc 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -68,6 +68,10 @@ "name": "Lee, Gregory R.", "orcid": "0000-0001-8895-2740" }, + { + "name": "Wang, Hao-Ting", + "orcid": "0000-0003-4078-2038" + }, { "affiliation": "Harvard University - Psychology", "name": "Kastman, Erik", @@ -92,6 +96,11 @@ "name": "Goncalves, Mathias", "orcid": "0000-0002-7252-7771" }, + { + "affiliation": "Department of Psychology, University of California Davis, CA, USA", + "name": "Riddell, Cameron", + "orcid": "0000-0001-8950-0375" + }, { "name": "Burns, Christopher" }, @@ -119,9 +128,24 @@ { "name": "Vincent, Robert D." }, + { + "affiliation": "Center for Magnetic Resonance Research, University of Minnesota", + "name": "Braun, Henry", + "orcid": "0000-0001-7003-9822" + }, { "name": "Subramaniam, Krish" }, + { + "affiliation": "MIT", + "name": "Jarecka, Dorota", + "orcid": "0000-0003-1857-8129" + }, + { + "affiliation": "Google", + "name": "Gorgolewski, Krzysztof J.", + "orcid": "0000-0003-3321-7583" + }, { "affiliation": "Rotman Research Institute, Baycrest Health Sciences, Toronto, ON, Canada", "name": "Raamana, Pradeep Reddy", @@ -147,6 +171,11 @@ { "name": "Hymers, Mark" }, + { + "affiliation": "Department of Psychology, Stanford University, CA, USA", + "name": "Esteban, Oscar", + "orcid": "0000-0001-8435-6191" + }, { "name": "Koudoro, Serge" }, @@ -170,6 +199,10 @@ { "name": "St-Jean, Samuel" }, + { + "name": "Panfilov, Egor", + "orcid": "0000-0002-2500-6375" + }, { "name": "Garyfallidis, Eleftherios" }, @@ -197,9 +230,6 @@ { "name": "Fauber, Bennet" }, - { - "name": "Panfilov, Egor" - }, { "affiliation": "McGill University", "name": "Poline, Jean-Baptiste", @@ -244,11 +274,6 @@ "name": "P\u00e9rez-Garc\u00eda, Fernando", "orcid": "0000-0001-9090-3024" }, - { - "affiliation": "Center for Magnetic Resonance Research, University of Minnesota", - "name": "Braun, Henry", - "orcid": "0000-0001-7003-9822" - }, { "name": "Solovey, Igor" }, From 633185290acd795723b2860415f8f245ac850b9f Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:45:23 -0500 Subject: [PATCH 383/689] DOC: Update changelog --- Changelog | 53 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/Changelog b/Changelog index fb004c93e3..f9f4f00df5 100644 --- a/Changelog +++ b/Changelog @@ -25,6 +25,59 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. +3.0.0 (To Be Determined) +======================== + +New features +------------ +* ArrayProxy method ``get_scaled()`` scales data with a dtype of a + specified precision, promoting as necessary to avoid overflow. This + is to used in ``img.get_fdata()`` to control memory usage. 
(pr/833) + (CM, reviewed by Ross Markello) +* GiftiImage method ``agg_data()`` to return usable data arrays (pr/793) + (Hao-Ting Wang, reviewed by CM) +* Accept ``os.PathLike`` objects in place of filenames (pr/610) (Cameron + Riddell, reviewed by MB, CM) +* Function to calculate obliquity of affines (pr/815) (Oscar Esteban, + reviewed by MB) + +Enhancements +------------ +* ``get_fdata(dtype=np.float32)`` will attempt to avoid casting data to + ``np.float64`` when scaling parameters would otherwise promote the data + type unnecessarily. (pr/833) (CM, reviewed by Ross Markello) +* ``ArraySequence`` now supports a large set of Python operators to combine + or update in-place. (pr/811) (MC, reviewed by Serge Koudoro, Philippe Poulin, + CM, MB) +* Warn, rather than fail, on DICOMs with unreadable Siemens CSA tags (pr/818) + (Henry Braun, reviewed by CM) +* Improve clarity of coordinate system tutorial (pr/823) (Egor Panfilov, + reviewed by MB) + +Bug fixes +--------- +* Sliced ``Tractogram``s no longer ``apply_affine`` to the original + ``Tractogram``'s streamlines. (pr/811) (MC, reviewed by Serge Koudoro, + Philippe Poulin, CM, MB) +* Re-import externals/netcdf.py from scipy to resolve numpy deprecation + (pr/821) (CM) + +Maintenance +----------- +* Support Python >=3.5.1, including Python 3.8.0 (pr/787) (CM) +* Manage versioning with slightly customized Versioneer (pr/786) (CM) +* Reference Nipy Community Code and Nibabel Developer Guidelines in + GitHub community documents (pr/778) (CM, reviewed by MB) + +API changes and deprecations +---------------------------- +* Deprecate ``ArraySequence.data`` in favor of ``ArraySequence.get_data()``, + which will return a copy. ``ArraySequence.data`` now returns a read-only + view. (pr/811) (MC, reviewed by Serge Koudoro, Philippe Poulin, CM, MB) +* Deprecate ``DataobjImage.get_data()`` API, to be removed in nibabel 5.0 + (pr/794, pr/809) (CM, reviewed by MB) + + 2.5.1 (Monday 23 September 2019) ================================ From 00b362e6e8ce4fe0c0797c11c51283cc31aedcea Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:45:35 -0500 Subject: [PATCH 384/689] MNT: Improve version determination on branches with digits --- nibabel/_version.py | 5 +++++ versioneer.py | 10 ++++++++++ 2 files changed, 15 insertions(+) diff --git a/nibabel/_version.py b/nibabel/_version.py index 08332076d9..60031b4d17 100644 --- a/nibabel/_version.py +++ b/nibabel/_version.py @@ -208,6 +208,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # CJM: Nibabel fix to filter out refs that exactly match prefix + # or that don't start with a number once the prefix is stripped + # (Mostly a concern when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) return {"version": r, diff --git a/versioneer.py b/versioneer.py index 18682b93bd..7c8333493e 100644 --- a/versioneer.py +++ b/versioneer.py @@ -629,6 +629,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # CJM: Nibabel fix to filter out refs that exactly match prefix + # or that don't start with a number once the prefix is stripped + # (Mostly a concern when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %%s" %% r) return {"version": r, @@ -1029,6 +1034,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # CJM: Nibabel fix to filter out refs that exactly match prefix + # or that don't start with a number once the prefix is stripped + # (Mostly a concern when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) return {"version": r, From 01cfc19c64a2c3cc2f84ec2d1ef8f3f8b068b787 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:46:27 -0500 Subject: [PATCH 385/689] DOC: Update author list --- doc/source/index.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index 09f09d4883..8b469631dd 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -96,9 +96,14 @@ contributed code and discussion (in rough order of appearance): * Samir Reddigari * Konstantinos Raktivan * Matt Cieslak -* Egor Pafilov +* Egor Panfilov * Jath Palasubramaniam * Henry Braun +* Oscar Esteban +* Cameron Riddell +* Hao-Ting Wang +* Dorota Jarecka +* Chris Gorgolewski License reprise =============== From cfa1cb4e660a7137d8cc48249886d19eea7196de Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 15 Nov 2019 12:46:48 -0500 Subject: [PATCH 386/689] MNT: Version 3.0.0rc1 --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index dbd877318c..dc1b903b78 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -13,7 +13,7 @@ _version_major = 3 _version_minor = 0 _version_micro = 0 -_version_extra = 'dev' +_version_extra = 'rc1' # _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From 8750b7b0dc3604eaa74a6a48778719599bb8a908 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sat, 16 Nov 2019 10:35:45 -0500 Subject: [PATCH 387/689] MNT: Version 3.0.0rc1.post.dev --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index dc1b903b78..5953ee2d8f 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -13,7 +13,7 @@ _version_major = 3 _version_minor = 0 _version_micro = 0 -_version_extra = 'rc1' +_version_extra = 'rc1.post.dev' # _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From 5d512eb9176715bd94d66fd7042f5be2614eb0fd Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sat, 16 Nov 2019 10:36:17 -0500 Subject: [PATCH 388/689] MNT: Add gitpython and twine to "dev" extra --- setup.cfg | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/setup.cfg b/setup.cfg index 3b5bddae19..bc5a75c5ad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,6 +43,9 @@ include_package_data = True [options.extras_require] dicom = pydicom >=0.9.9 +dev = + gitpython + twine doc = matplotlib >= 1.3.1 mock @@ -58,7 +61,9 @@ test = pytest all = %(dicom)s + %(dev)s %(doc)s + %(style)s %(test)s [options.entry_points] From 71ec9649be5731dc0fa14f76f38cad88eef414ed Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2019 10:37:55 -0500 Subject: [PATCH 389/689] MNT: Fix Dorota Jarecka ORCID --- .zenodo.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.zenodo.json b/.zenodo.json index 123f5d1bcc..54cbe2f28b 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -139,7 +139,7 @@ { "affiliation": "MIT", "name": "Jarecka, Dorota", - "orcid": "0000-0003-1857-8129" + "orcid": "0000-0001-8282-2988" }, { "affiliation": "Google", From 7120347111d0f0a2af29bb9ba2d133f61fdb17b8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 21 Nov 2019 11:37:18 -0500 Subject: [PATCH 390/689] MNT: Drop test data from MANIFEST.in Update setup.cfg option to enable package data specification --- MANIFEST.in | 6 ------ setup.cfg | 1 - 2 files changed, 7 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index a901441ed6..381cab34a5 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,11 +3,5 @@ include Changelog TODO requirements.txt recursive-include doc * recursive-include bin * recursive-include tools * -# put this stuff back into setup.py (package_data) once I'm enlightened -# enough to accomplish this herculean task -recursive-include nibabel/tests/data * -recursive-include nibabel/externals/tests/data * -recursive-include nibabel/nicom/tests/data * -recursive-include nibabel/gifti/tests/data * include versioneer.py include nibabel/_version.py diff --git a/setup.cfg b/setup.cfg index bc5a75c5ad..069d93d007 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,7 +38,6 @@ tests_require = test_suite = nose.collector zip_safe = False packages = find: -include_package_data = True [options.extras_require] dicom = From 7d1badf07c87ea18ac28714feaa118c6f84c07c0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 7 Nov 2019 16:29:22 -0500 Subject: [PATCH 391/689] DOC: Discuss slicer interface --- doc/source/links_names.txt | 1 + doc/source/nibabel_images.rst | 74 +++++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+) diff --git a/doc/source/links_names.txt b/doc/source/links_names.txt index 7082b812fd..4b37ef9a4d 100644 --- a/doc/source/links_names.txt +++ b/doc/source/links_names.txt @@ -223,6 +223,7 @@ .. _`wikipedia shear matrix`: https://en.wikipedia.org/wiki/Shear_matrix .. _`wikipedia reflection`: https://en.wikipedia.org/wiki/Reflection_(mathematics) .. _`wikipedia direction cosine`: https://en.wikipedia.org/wiki/Direction_cosine +.. _`wikipedia aliasing`: https://en.wikipedia.org/wiki/Aliasing .. Programming ideas .. _proxy: https://en.wikipedia.org/wiki/Proxy_pattern diff --git a/doc/source/nibabel_images.rst b/doc/source/nibabel_images.rst index f14debcc93..2c62fea478 100644 --- a/doc/source/nibabel_images.rst +++ b/doc/source/nibabel_images.rst @@ -282,6 +282,80 @@ True See :doc:`images_and_memory` for more details on managing image memory and controlling the image cache. +.. 
_image-slicing: + +Image slicing +============= + +At times it is useful to manipulate an image's shape while keeping it in the +same coordinate system. +The ``slicer`` attribute provides an array-slicing interface to produce new +images with an appropriately adjusted header, such that the data at a given +RAS+ location is unchanged. + +>>> cropped_img = img.slicer[32:-32, ...] +>>> cropped_img.shape +(64, 96, 24, 2) + +The data is identical to cropping the data block directly: + +>>> np.array_equal(cropped_img.get_fdata(), img.get_fdata()[32:-32, ...]) +True + +However, unused data did not need to be loaded into memory or scaled. +Additionally, the image affine was adjusted so that the X-translation is +32 voxels (64mm) less: + +>>> cropped_img.affine +array([[ -2. , 0. , 0. , 53.86], + [ -0. , 1.97, -0.36, -35.72], + [ 0. , 0.32, 2.17, -7.25], + [ 0. , 0. , 0. , 1. ]]) + +>>> img.affine - cropped_img.affine +array([[ 0., 0., 0., 64.], + [ 0., 0., 0., 0.], + [ 0., 0., 0., 0.], + [ 0., 0., 0., 0.]]) + +Another use for the slicer object is to choose specific volumes from a +time series: + +>>> vol0 = img.slicer[..., 0] +>>> vol0.shape +(128, 96, 24) + +Or a selection of volumes: + +>>> img.slicer[..., :1].shape +(128, 96, 24, 1) +>>> img.slicer[..., :2].shape +(128, 96, 24, 2) + +It is also possible to use an integer step when slicing, downsampling +the image without filtering. +Note that this *will induce artifacts* in the frequency spectrum +(`wikipedia aliasing`_) along any axis that is down-sampled. + +>>> downsampled = vol0.slicer[::2, ::2, ::2] +>>> downsampled.header.get_zooms() +(4.0, 4.0, 4.399998) + +Finally, an image can be flipped along an axis, maintaining an appropriate +affine matrix: + +>>> nib.orientations.aff2axcodes(img.affine) +('L', 'A', 'S') +>>> ras = img.slicer[::-1] +>>> nib.orientations.aff2axcodes(ras.affine) +('R', 'A', 'S') +>>> ras.affine +array([[ 2. , 0. , 0. , 117.86], + [ 0. , 1.97, -0.36, -35.72], + [ -0. , 0.32, 2.17, -7.25], + [ 0. , 0. , 0. , 1. ]]) + + ****************** Loading and saving ****************** From d1fc8e184365aff1cfd6f7bfe4974d2fc10bbe55 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 27 Nov 2019 15:38:09 -0500 Subject: [PATCH 392/689] CI: Update release wheel repository --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ea4eb22291..dc47a16a6d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ env: - DEPENDS="numpy scipy matplotlib h5py pillow pydicom indexed_gzip" - INSTALL_TYPE="setup" - CHECK_TYPE="test" - - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" + - EXTRA_WHEELS="https://3f23b170c54c2533c070-1c8a9b3114517dc5fe17b7c3f8c63a43.ssl.cf2.rackcdn.com" - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" From 3e2c0687934f8de28105b65b90eb9b6ed2f81367 Mon Sep 17 00:00:00 2001 From: "Christopher J.
Markiewicz" Date: Wed, 27 Nov 2019 15:38:33 -0500 Subject: [PATCH 393/689] CI: Re-add h5py to Py38 tests --- .travis.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index dc47a16a6d..ed8682f431 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,14 +23,11 @@ env: python: - 3.6 - 3.7 + - 3.8 matrix: include: # Basic dependencies only - - python: 3.8 - env: - - DEPENDS="numpy scipy matplotlib~=3.2.0rc1 pillow pydicom indexed_gzip" - # Basic dependencies only - python: 3.5 env: - DEPENDS="-r requirements.txt" From efc44ae63af49a5943bec32f14e4ef19848d522a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 27 Nov 2019 17:43:31 -0500 Subject: [PATCH 394/689] CI: Restore old wheel repository --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ed8682f431..804e198b90 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,9 +15,10 @@ env: - DEPENDS="numpy scipy matplotlib h5py pillow pydicom indexed_gzip" - INSTALL_TYPE="setup" - CHECK_TYPE="test" + - OLD_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" - EXTRA_WHEELS="https://3f23b170c54c2533c070-1c8a9b3114517dc5fe17b7c3f8c63a43.ssl.cf2.rackcdn.com" - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" + - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS --find-links=$OLD_WHEELS" - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" python: From 0f8842c99ef38ad75e562127b6eedd6a3c2db863 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 28 Nov 2019 08:51:50 -0500 Subject: [PATCH 395/689] TEST: Check non-integral slopes, intercepts --- nibabel/tests/test_proxy_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index b8316d7291..1e010055e0 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -216,8 +216,8 @@ def obj_params(self): offsets = (self.header_class().get_data_offset(),) else: offsets = (0, 16) - slopes = (1., 2.) if self.has_slope else (1.,) - inters = (0., 10.) if self.has_inter else (0.,) + slopes = (1., 2., 3.1416) if self.has_slope else (1.,) + inters = (0., 10., 2.7183) if self.has_inter else (0.,) dtypes = (np.uint8, np.int16, np.float32) for shape, dtype, offset, slope, inter in product(self.shapes, dtypes, From 460c8d9386a02eefbc073c03d1ee1e45197b92e3 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 28 Nov 2019 09:18:41 -0500 Subject: [PATCH 396/689] CI: Disable pytest for now --- .azure-pipelines/windows.yml | 4 +--- .travis.yml | 1 - 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 2d63db68e0..f825bef612 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -29,8 +29,7 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% - python -m pip install nose mock coverage codecov - python -m pip install pytest + python -m pip install nose mock coverage codecov pytest displayName: 'Install dependencies' - script: | python -m pip install . @@ -41,7 +40,6 @@ jobs: cd for_testing cp ../.coveragerc . 
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py displayName: 'Nose tests' - script: | cd for_testing diff --git a/.travis.yml b/.travis.yml index 804e198b90..81d3589769 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,7 +130,6 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py else false fi From 381ad1c0a98f5d4d7df5e34cc803811e952d3784 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 2 Dec 2019 09:59:29 -0500 Subject: [PATCH 397/689] TEST: Test supported dtypes for Analyze/NIfTI/MGH --- nibabel/tests/test_proxy_api.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 1e010055e0..dd48e95506 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -47,6 +47,7 @@ from .._h5py_compat import h5py, have_h5py from .. import ecat from .. import parrec +from ..casting import have_binary128 from ..arrayproxy import ArrayProxy, is_proxy @@ -192,6 +193,7 @@ class TestAnalyzeProxyAPI(_TestProxyAPI): shapes = ((2,), (2, 3), (2, 3, 4), (2, 3, 4, 5)) has_slope = False has_inter = False + data_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.complex64, np.float64) array_order = 'F' # Cannot set offset for Freesurfer settable_offset = True @@ -218,9 +220,8 @@ def obj_params(self): offsets = (0, 16) slopes = (1., 2., 3.1416) if self.has_slope else (1.,) inters = (0., 10., 2.7183) if self.has_inter else (0.,) - dtypes = (np.uint8, np.int16, np.float32) for shape, dtype, offset, slope, inter in product(self.shapes, - dtypes, + self.data_dtypes, offsets, slopes, inters): @@ -325,6 +326,10 @@ class TestSpm2AnalyzeProxyAPI(TestSpm99AnalyzeProxyAPI): class TestNifti1ProxyAPI(TestSpm99AnalyzeProxyAPI): header_class = Nifti1Header has_inter = True + data_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.complex64, np.float64, + np.int8, np.uint16, np.uint32, np.int64, np.uint64, np.complex128) + if have_binary128(): + data_dtypes.extend(np.float128, np.complex256) class TestMGHAPI(TestAnalyzeProxyAPI): @@ -334,6 +339,7 @@ class TestMGHAPI(TestAnalyzeProxyAPI): has_inter = False settable_offset = False data_endian = '>' + data_dtypes = (np.uint8, np.int16, np.int32, np.float32) class TestMinc1API(_TestProxyAPI): From 8cc2629613656de328f1ffcdbc601bf641db0a2f Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 2 Dec 2019 10:22:49 -0500 Subject: [PATCH 398/689] TEST: Refine parameter and assertion precision --- nibabel/tests/test_proxy_api.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index dd48e95506..51edf2af7f 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -55,7 +55,7 @@ from nose.tools import (assert_true, assert_false, assert_raises, assert_equal, assert_not_equal, assert_greater_equal) -from numpy.testing import (assert_almost_equal, assert_array_equal) +from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose from ..testing import data_path as DATA_PATH, assert_dt_equal @@ -143,7 +143,10 @@ def validate_get_scaled(self, pmaker, params): for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: out = prox.get_scaled(dtype=dtype) - assert_almost_equal(out, params['arr_out']) + # Half-precision is imprecise. Obviously. It's a bad idea, but don't break + # the test over it. + rtol = 1e-03 if dtype == np.float16 else 1e-05 + assert_allclose(out, params['arr_out'].astype(out.dtype), rtol=rtol, atol=1e-08) assert_greater_equal(out.dtype, np.dtype(dtype)) # Shape matches expected shape assert_equal(out.shape, params['shape']) @@ -218,8 +221,8 @@ def obj_params(self): offsets = (self.header_class().get_data_offset(),) else: offsets = (0, 16) - slopes = (1., 2., 3.1416) if self.has_slope else (1.,) - inters = (0., 10., 2.7183) if self.has_inter else (0.,) + slopes = (1., 2., float(np.float32(3.1416))) if self.has_slope else (1.,) + inters = (0., 10., float(np.float32(2.7183))) if self.has_inter else (0.,) for shape, dtype, offset, slope, inter in product(self.shapes, self.data_dtypes, offsets, @@ -263,7 +266,7 @@ def sio_func(): dtype=dtype, dtype_out=dtype_out, arr=arr.copy(), - arr_out=arr * slope + inter, + arr_out=arr.astype(dtype_out) * slope + inter, shape=shape, offset=offset, slope=slope, From 779f27b0b1a2b73978b01db1c9c38f2e898c66b8 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 2 Dec 2019 10:24:27 -0500 Subject: [PATCH 399/689] FIX: Cast scaling parameters only when safe --- nibabel/arrayproxy.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index eda456c4b7..b8313e8ae4 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -360,10 +360,13 @@ def _get_scaled(self, dtype, slicer): scl_slope = np.asanyarray(self._slope) scl_inter = np.asanyarray(self._inter) use_dtype = scl_slope.dtype if dtype is None else dtype - slope = scl_slope.astype(use_dtype) - inter = scl_inter.astype(use_dtype) + + if np.can_cast(scl_slope, use_dtype): + scl_slope = scl_slope.astype(use_dtype) + if np.can_cast(scl_inter, use_dtype): + scl_inter = scl_inter.astype(use_dtype) # Read array and upcast as necessary for big slopes, intercepts - scaled = apply_read_scaling(self._get_unscaled(slicer=slicer), slope, inter) + scaled = apply_read_scaling(self._get_unscaled(slicer=slicer), scl_slope, scl_inter) if dtype is not None: scaled = scaled.astype(np.promote_types(scaled.dtype, dtype), copy=False) return scaled From b1f6e2cc4e53af880e70d9de5436c05e4aea7627 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 3 Dec 2019 12:13:30 -0500 Subject: [PATCH 400/689] TEST: Add comment to explain parameters --- nibabel/tests/test_proxy_api.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 51edf2af7f..1694254a0c 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -221,6 +221,8 @@ def obj_params(self): offsets = (self.header_class().get_data_offset(),) else: offsets = (0, 16) + # For non-integral parameters, cast to float32 value can be losslessly cast + # later, enabling exact checks, then back to float for consistency slopes = (1., 2., float(np.float32(3.1416))) if self.has_slope else (1.,) inters = (0., 10., float(np.float32(2.7183))) if self.has_inter else (0.,) for shape, dtype, offset, slope, inter in product(self.shapes, From ccd745347840f147a480cbdf9351df83db69c14d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 19 Nov 2019 09:50:26 -0500 Subject: [PATCH 401/689] FIX: Accept dtype parameter to ArrayProxy.__array__ --- nibabel/arrayproxy.py | 7 +++++-- nibabel/ecat.py | 5 +++-- nibabel/minc1.py | 7 +++++-- nibabel/parrec.py | 7 +++++-- 4 files changed, 18 insertions(+), 8 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index b8313e8ae4..d7d96b24fb 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -405,8 +405,11 @@ def get_scaled(self, dtype=None): """ return self._get_scaled(dtype=dtype, slicer=()) - def __array__(self): - return self._get_scaled(dtype=None, slicer=()) + def __array__(self, dtype=None): + arr = self._get_scaled(dtype=dtype, slicer=()) + if dtype is not None: + arr = arr.astype(dtype, copy=False) + return arr def __getitem__(self, slicer): return self._get_scaled(dtype=None, slicer=slicer) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index a0923f0753..7686a8098b 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -689,7 +689,7 @@ def ndim(self): def is_proxy(self): return True - def __array__(self): + def __array__(self, dtype=None): ''' Read of data from file This reads ALL FRAMES into one array, can be memory expensive. 
@@ -697,7 +697,8 @@ def __array__(self): If you want to read only some slices, use the slicing syntax (``__getitem__``) below, or ``subheader.data_from_fileobj(frame)`` ''' - data = np.empty(self.shape) + # dtype=None is interpreted as float64 + data = np.empty(self.shape, dtype=dtype) frame_mapping = get_frame_order(self._subheader._mlist) for i in sorted(frame_mapping): data[:, :, :, i] = self._subheader.data_from_fileobj( diff --git a/nibabel/minc1.py b/nibabel/minc1.py index c41af5c454..2c54be841e 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -290,9 +290,12 @@ def get_scaled(self, dtype=None): """ return self._get_scaled(dtype=dtype, slicer=()) - def __array__(self): + def __array__(self, dtype=None): ''' Read of data from file ''' - return self._get_scaled(dtype=None, slicer=()) + arr = self._get_scaled(dtype=dtype, slicer=()) + if dtype is not None: + arr = arr.astype(dtype, copy=False) + return arr def __getitem__(self, sliceobj): """ Read slice `sliceobj` of data from file """ diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 59eac79809..5e26f67e0a 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -699,8 +699,11 @@ def get_scaled(self, dtype=None): """ return self._get_scaled(dtype=dtype, slicer=()) - def __array__(self): - return self._get_scaled(dtype=None, slicer=()) + def __array__(self, dtype=None): + arr = self._get_scaled(dtype=dtype, slicer=()) + if dtype is not None: + arr = arr.astype(dtype, copy=False) + return arr def __getitem__(self, slicer): return self._get_scaled(dtype=None, slicer=slicer) From 5b76c8e4dd039305dac0836b2871b4ca59562037 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 19 Nov 2019 10:04:44 -0500 Subject: [PATCH 402/689] TEST: Validate dataobj.__array__(dtype) --- nibabel/tests/test_proxy_api.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 1694254a0c..b0adc52987 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -132,6 +132,27 @@ def validate_asarray(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) + def validate_array_interface_with_dtype(self, pmaker, params): + # Check proxy returns expected array from asarray + prox, fio, hdr = pmaker() + orig = np.array(prox, dtype=None) + assert_array_equal(orig, params['arr_out']) + assert_dt_equal(orig.dtype, params['dtype_out']) + + for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: + # Directly coerce with a dtype + direct = dtype(prox) + assert_almost_equal(direct, orig.astype(dtype)) + assert_dt_equal(direct.dtype, np.dtype(dtype)) + assert_equal(direct.shape, params['shape']) + # All three methods should produce equivalent results + for arrmethod in (np.array, np.asarray, np.asanyarray): + out = arrmethod(prox, dtype=dtype) + assert_array_equal(out, direct) + assert_dt_equal(out.dtype, np.dtype(dtype)) + # Shape matches expected shape + assert_equal(out.shape, params['shape']) + def validate_get_scaled(self, pmaker, params): # Check proxy returns expected array from asarray prox, fio, hdr = pmaker() From b457534f5a34c7eef91a11b7abdf15deab2b7ce5 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 26 Nov 2019 10:22:29 -0500 Subject: [PATCH 403/689] RF: Drop (mostly) redundant ArrayProxy.get_scaled() method --- nibabel/arrayproxy.py | 29 ++++++++++---------------- nibabel/dataobj_images.py | 11 +++------- nibabel/ecat.py | 36 +++++++++------------------------ nibabel/minc1.py | 32 ++++++++--------------------- nibabel/parrec.py | 23 ++++++++------------- nibabel/tests/test_proxy_api.py | 24 ++++------------------ 6 files changed, 44 insertions(+), 111 deletions(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index d7d96b24fb..c4189e61e8 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -378,34 +378,27 @@ def get_unscaled(self): """ return self._get_unscaled(slicer=()) - def get_scaled(self, dtype=None): - """ Read data from file and apply scaling + def __array__(self, dtype=None): + """ Read data from file and apply scaling, casting to ``dtype`` - The dtype of the returned array is the narrowest dtype that can - represent the data without overflow, and is at least as wide as - the dtype parameter. + If ``dtype`` is unspecified, the dtype of the returned array is the + narrowest dtype that can represent the data without overflow. + Generally, it is the wider of the dtypes of the slopes or intercepts. - If dtype is unspecified, it is the wider of the dtypes of the slope - or intercept. This will generally be determined by the parameter - size in the image header, and so should be consistent for a given - image format, but may vary across formats. Notably, these factors - are single-precision (32-bit) floats for NIfTI-1 and double-precision - (64-bit) floats for NIfTI-2. + The types of the scale factors will generally be determined by the + parameter size in the image header, and so should be consistent for a + given image format, but may vary across formats. Parameters ---------- - dtype : numpy dtype specifier - A numpy dtype specifier specifying the narrowest acceptable - dtype. + dtype : numpy dtype specifier, optional + A numpy dtype specifier specifying the type of the returned array. Returns ------- array - Scaled of image data of data type `dtype`. + Scaled image data with type `dtype`. 
""" - return self._get_scaled(dtype=dtype, slicer=()) - - def __array__(self, dtype=None): arr = self._get_scaled(dtype=dtype, slicer=()) if dtype is not None: arr = arr.astype(dtype, copy=False) diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index dd4c853537..4b4d1b55d8 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -10,7 +10,6 @@ import numpy as np -from .arrayproxy import is_proxy from .filebasedimages import FileBasedImage from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version @@ -351,14 +350,10 @@ def get_fdata(self, caching='fill', dtype=np.float64): if self._fdata_cache is not None: if self._fdata_cache.dtype.type == dtype.type: return self._fdata_cache - dataobj = self._dataobj - # Attempt to confine data array to dtype during scaling - # On overflow, may still upcast - if is_proxy(dataobj): - dataobj = dataobj.get_scaled(dtype=dtype) # Always return requested data type - # For array proxies, will only copy on overflow - data = np.asanyarray(dataobj, dtype=dtype) + # For array proxies, will attempt to confine data array to dtype + # during scaling + data = np.asanyarray(self._dataobj, dtype=dtype) if caching == 'fill': self._fdata_cache = data return data diff --git a/nibabel/ecat.py b/nibabel/ecat.py index 7686a8098b..b15d6906ea 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -696,6 +696,16 @@ def __array__(self, dtype=None): If you want to read only some slices, use the slicing syntax (``__getitem__``) below, or ``subheader.data_from_fileobj(frame)`` + + Parameters + ---------- + dtype : numpy dtype specifier, optional + A numpy dtype specifier specifying the type of the returned array. + + Returns + ------- + array + Scaled image data with type `dtype`. ''' # dtype=None is interpreted as float64 data = np.empty(self.shape, dtype=dtype) @@ -705,32 +715,6 @@ def __array__(self, dtype=None): frame_mapping[i][0]) return data - def get_scaled(self, dtype=None): - """ Read data from file and apply scaling - - The dtype of the returned array is the narrowest dtype that can - represent the data without overflow, and is at least as wide as - the dtype parameter. - - If dtype is unspecified, it is automatically determined. - - Parameters - ---------- - dtype : numpy dtype specifier - A numpy dtype specifier specifying the narrowest acceptable - dtype. - - Returns - ------- - array - Scaled of image data of data type `dtype`. - """ - data = self.__array__() - if dtype is None: - return data - final_type = np.promote_types(data.dtype, dtype) - return data.astype(final_type, copy=False) - def __getitem__(self, sliceobj): """ Return slice `sliceobj` from ECAT data, optimizing if possible """ diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 2c54be841e..6137d11a65 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -261,45 +261,29 @@ def ndim(self): def is_proxy(self): return True - def _get_scaled(self, dtype, slicer): - data = self.minc_file.get_scaled_data(slicer) - if dtype is None: - return data - final_type = np.promote_types(data.dtype, dtype) - return data.astype(final_type, copy=False) - - def get_scaled(self, dtype=None): - """ Read data from file and apply scaling - - The dtype of the returned array is the narrowest dtype that can - represent the data without overflow, and is at least as wide as - the dtype parameter. + def __array__(self, dtype=None): + """ Read data from file and apply scaling, casting to ``dtype`` - If dtype is unspecified, it is automatically determined. 
+ If ``dtype`` is unspecified, the dtype is automatically determined. Parameters ---------- - dtype : numpy dtype specifier - A numpy dtype specifier specifying the narrowest acceptable - dtype. + dtype : numpy dtype specifier, optional + A numpy dtype specifier specifying the type of the returned array. Returns ------- array - Scaled of image data of data type `dtype`. + Scaled image data with type `dtype`. """ - return self._get_scaled(dtype=dtype, slicer=()) - - def __array__(self, dtype=None): - ''' Read of data from file ''' - arr = self._get_scaled(dtype=dtype, slicer=()) + arr = self.minc_file.get_scaled_data(sliceobj=()) if dtype is not None: arr = arr.astype(dtype, copy=False) return arr def __getitem__(self, sliceobj): """ Read slice `sliceobj` of data from file """ - return self._get_scaled(dtype=None, slicer=sliceobj) + return self.minc_file.get_scaled_data(sliceobj) class MincHeader(SpatialHeader): diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 5e26f67e0a..431e043d12 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -676,30 +676,23 @@ def get_unscaled(self): """ return self._get_unscaled(slicer=()) - def get_scaled(self, dtype=None): - """ Read data from file and apply scaling - - The dtype of the returned array is the narrowest dtype that can - represent the data without overflow, and is at least as wide as - the dtype parameter. + def __array__(self, dtype=None): + """ Read data from file and apply scaling, casting to ``dtype`` - If dtype is unspecified, it is the wider of the dtypes of the slopes - or intercepts + If ``dtype`` is unspecified, the dtype of the returned array is the + narrowest dtype that can represent the data without overflow. + Generally, it is the wider of the dtypes of the slopes or intercepts. Parameters ---------- - dtype : numpy dtype specifier - A numpy dtype specifier specifying the narrowest acceptable - dtype. + dtype : numpy dtype specifier, optional + A numpy dtype specifier specifying the type of the returned array. Returns ------- array - Scaled of image data of data type `dtype`. + Scaled image data with type `dtype`. """ - return self._get_scaled(dtype=dtype, slicer=()) - - def __array__(self, dtype=None): arr = self._get_scaled(dtype=dtype, slicer=()) if dtype is not None: arr = arr.astype(dtype, copy=False) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index b0adc52987..585d5e1be6 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -142,7 +142,10 @@ def validate_array_interface_with_dtype(self, pmaker, params): for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: # Directly coerce with a dtype direct = dtype(prox) - assert_almost_equal(direct, orig.astype(dtype)) + # Half-precision is imprecise. Obviously. It's a bad idea, but don't break + # the test over it. 
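The relaxed tolerance follows from float16 having a 10-bit significand, giving a relative spacing of roughly 1e-3; a round-tripped value can therefore differ from its float64 counterpart by a few parts in ten thousand::

    import numpy as np

    print(np.finfo(np.float16).eps)       # ~0.000977
    x = float(np.float16(3.1416))         # 3.140625 after rounding
    print(abs(x - 3.1416) / 3.1416)       # ~3.1e-04, safely inside rtol=1e-03
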
+ rtol = 1e-03 if dtype == np.float16 else 1e-05 + assert_allclose(direct, orig.astype(dtype), rtol=rtol, atol=1e-08) assert_dt_equal(direct.dtype, np.dtype(dtype)) assert_equal(direct.shape, params['shape']) # All three methods should produce equivalent results @@ -153,25 +156,6 @@ def validate_array_interface_with_dtype(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) - def validate_get_scaled(self, pmaker, params): - # Check proxy returns expected array from asarray - prox, fio, hdr = pmaker() - out = prox.get_scaled() - assert_array_equal(out, params['arr_out']) - assert_dt_equal(out.dtype, params['dtype_out']) - # Shape matches expected shape - assert_equal(out.shape, params['shape']) - - for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: - out = prox.get_scaled(dtype=dtype) - # Half-precision is imprecise. Obviously. It's a bad idea, but don't break - # the test over it. - rtol = 1e-03 if dtype == np.float16 else 1e-05 - assert_allclose(out, params['arr_out'].astype(out.dtype), rtol=rtol, atol=1e-08) - assert_greater_equal(out.dtype, np.dtype(dtype)) - # Shape matches expected shape - assert_equal(out.shape, params['shape']) - def validate_header_isolated(self, pmaker, params): # Confirm altering input header has no effect # Depends on header providing 'get_data_dtype', 'set_data_dtype', From 8630b26ca393a2ab0f1b6b7717910489e93af45c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 27 Nov 2019 15:31:40 -0500 Subject: [PATCH 404/689] DOC: Update changelog --- Changelog | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/Changelog b/Changelog index f9f4f00df5..165f1db94b 100644 --- a/Changelog +++ b/Changelog @@ -30,10 +30,13 @@ References like "pr/298" refer to github pull request numbers. New features ------------ -* ArrayProxy method ``get_scaled()`` scales data with a dtype of a - specified precision, promoting as necessary to avoid overflow. This - is to used in ``img.get_fdata()`` to control memory usage. (pr/833) - (CM, reviewed by Ross Markello) +* ArrayProxy ``__array__()`` now accepts a ``dtype`` parameter, allowing + ``numpy.array(dataobj, dtype=...)`` calls, as well as casting directly + with a dtype (for example, ``numpy.float32(dataobj)``) to control the + output type. Scale factors (slope, intercept) are applied, but may be + cast to narrower types, to control memory usage. This is now the basis + of ``img.get_fdata()``, which will scale data in single precision if + the output type is ``float32``. (pr/844) (CM, reviewed by ...) * GiftiImage method ``agg_data()`` to return usable data arrays (pr/793) (Hao-Ting Wang, reviewed by CM) * Accept ``os.PathLike`` objects in place of filenames (pr/610) (Cameron From 5e5ec22858eb111590a05295e16529f038cf0a34 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Mon, 2 Dec 2019 11:33:01 -0500 Subject: [PATCH 405/689] TEST: Filter complex warnings --- nibabel/tests/test_proxy_api.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 585d5e1be6..48a024795d 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -57,7 +57,7 @@ from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose -from ..testing import data_path as DATA_PATH, assert_dt_equal +from ..testing import data_path as DATA_PATH, assert_dt_equal, clear_and_catch_warnings from ..tmpdirs import InTemporaryDirectory @@ -139,6 +139,12 @@ def validate_array_interface_with_dtype(self, pmaker, params): assert_array_equal(orig, params['arr_out']) assert_dt_equal(orig.dtype, params['dtype_out']) + context = None + if np.issubdtype(orig.dtype, np.complexfloating): + context = clear_and_catch_warnings() + context.__enter__() + warnings.simplefilter('ignore', np.ComplexWarning) + for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']: # Directly coerce with a dtype direct = dtype(prox) @@ -156,6 +162,9 @@ def validate_array_interface_with_dtype(self, pmaker, params): # Shape matches expected shape assert_equal(out.shape, params['shape']) + if context is not None: + context.__exit__() + def validate_header_isolated(self, pmaker, params): # Confirm altering input header has no effect # Depends on header providing 'get_data_dtype', 'set_data_dtype', From 70987fb327bd823750bef2c8ff73f7789eebf56d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 2 Dec 2019 14:33:10 -0500 Subject: [PATCH 406/689] TEST: Improve test naming for tracking down failures --- nibabel/tests/test_api_validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_api_validators.py b/nibabel/tests/test_api_validators.py index a7cbb8b555..41d3f41110 100644 --- a/nibabel/tests/test_api_validators.py +++ b/nibabel/tests/test_api_validators.py @@ -19,7 +19,7 @@ def meth(self): for imaker, params in self.obj_params(): validator(self, imaker, params) meth.__name__ = 'test_' + name[len('validate_'):] - meth.__doc__ = 'autogenerated test from ' + name + meth.__doc__ = 'autogenerated test from {}.{}'.format(klass.__name__, name) return meth for name in dir(klass): if not name.startswith('validate_'): From 3351d16bbb930350378f395dbd22cde329b90e8c Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 5 Dec 2019 21:50:07 -0500 Subject: [PATCH 407/689] FIX: ECAT data must be coerced after reading --- nibabel/ecat.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index b15d6906ea..1814b9147c 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -708,11 +708,13 @@ def __array__(self, dtype=None): Scaled image data with type `dtype`. 
''' # dtype=None is interpreted as float64 - data = np.empty(self.shape, dtype=dtype) + data = np.empty(self.shape) frame_mapping = get_frame_order(self._subheader._mlist) for i in sorted(frame_mapping): data[:, :, :, i] = self._subheader.data_from_fileobj( frame_mapping[i][0]) + if dtype is not None: + data = data.astype(dtype, copy=False) return data def __getitem__(self, sliceobj): From 71e3cccda997837ca6820d4edb90060eafe670a8 Mon Sep 17 00:00:00 2001 From: Ross Markello Date: Fri, 6 Dec 2019 10:03:17 -0500 Subject: [PATCH 408/689] [REF] Removes keywordonly methods + functions Uses built-in Python kw-only syntax (*) instead. --- nibabel/analyze.py | 4 +-- nibabel/arrayproxy.py | 4 +-- nibabel/brikhead.py | 7 ++--- nibabel/cifti2/cifti2.py | 4 +-- nibabel/dataobj_images.py | 7 ++--- nibabel/ecat.py | 4 +-- nibabel/freesurfer/mghformat.py | 4 +-- nibabel/keywordonly.py | 28 -------------------- nibabel/minc1.py | 4 +-- nibabel/minc2.py | 4 +-- nibabel/parrec.py | 10 +++---- nibabel/spm99analyze.py | 4 +-- nibabel/tests/test_keywordonly.py | 43 ------------------------------- 13 files changed, 15 insertions(+), 112 deletions(-) delete mode 100644 nibabel/keywordonly.py delete mode 100644 nibabel/tests/test_keywordonly.py diff --git a/nibabel/analyze.py b/nibabel/analyze.py index dc352505c6..a5ff79d83b 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -95,7 +95,6 @@ from .fileholders import copy_file_map from .batteryrunners import Report from .arrayproxy import ArrayProxy -from .keywordonly import kw_only_meth # Sub-parts of standard analyze header from # Mayo dbh.h file @@ -933,8 +932,7 @@ def set_data_dtype(self, dtype): self._header.set_data_dtype(dtype) @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): ''' Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index b8313e8ae4..5852191968 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -34,7 +34,6 @@ from .deprecated import deprecate_with_version from .volumeutils import array_from_file, apply_read_scaling from .fileslice import fileslice, canonical_slicers -from .keywordonly import kw_only_meth from . import openers @@ -96,8 +95,7 @@ class ArrayProxy(object): order = 'F' _header = None - @kw_only_meth(2) - def __init__(self, file_like, spec, mmap=True, keep_file_open=None): + def __init__(self, file_like, spec, *, mmap=True, keep_file_open=None): """Initialize array proxy instance .. 
deprecated:: 2.4.1 diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 7ef1386872..7e34c36b36 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -36,7 +36,6 @@ from .arrayproxy import ArrayProxy from .fileslice import strided_scalar -from .keywordonly import kw_only_meth from .spatialimages import ( SpatialImage, SpatialHeader, @@ -220,8 +219,7 @@ class AFNIArrayProxy(ArrayProxy): None """ - @kw_only_meth(2) - def __init__(self, file_like, header, mmap=True, keep_file_open=None): + def __init__(self, file_like, header, *, mmap=True, keep_file_open=None): """ Initialize AFNI array proxy @@ -504,8 +502,7 @@ class AFNIImage(SpatialImage): ImageArrayProxy = AFNIArrayProxy @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): """ Creates an AFNIImage instance from `file_map` diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 1a5307eba5..a19a04a02f 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -24,7 +24,6 @@ from ..dataobj_images import DataobjImage from ..nifti2 import Nifti2Image, Nifti2Header from ..arrayproxy import reshape_dataobj -from ..keywordonly import kw_only_meth from warnings import warn @@ -1389,8 +1388,7 @@ def nifti_header(self): return self._nifti_header @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): """ Load a CIFTI-2 image from a file_map Parameters diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index dd4c853537..bc8ef07f03 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -12,7 +12,6 @@ from .arrayproxy import is_proxy from .filebasedimages import FileBasedImage -from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version @@ -420,8 +419,7 @@ def get_shape(self): return self.shape @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): ''' Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 @@ -458,8 +456,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): raise NotImplementedError @classmethod - @kw_only_meth(1) - def from_filename(klass, filename, mmap=True, keep_file_open=None): + def from_filename(klass, filename, *, mmap=True, keep_file_open=None): '''Class method to create image from filename `filename` .. 
deprecated:: 2.4.1 diff --git a/nibabel/ecat.py b/nibabel/ecat.py index a0923f0753..4a2b55551f 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -54,7 +54,6 @@ from .arraywriters import make_array_writer from .wrapstruct import WrapStruct from .fileslice import canonical_slicers, predict_shape, slice2outax -from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version BLOCK_SIZE = 512 @@ -901,8 +900,7 @@ def _get_fileholders(file_map): return file_map['header'], file_map['image'] @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): """class method to create image from mapping specified in file_map """ diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 6eb0f156e9..8324c112e0 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -21,7 +21,6 @@ from ..spatialimages import HeaderDataError, SpatialImage from ..fileholders import FileHolder from ..arrayproxy import ArrayProxy, reshape_dataobj -from ..keywordonly import kw_only_meth from ..openers import ImageOpener from ..batteryrunners import BatteryRunner, Report from ..wrapstruct import LabeledWrapStruct @@ -537,8 +536,7 @@ def filespec_to_file_map(klass, filespec): return super(MGHImage, klass).filespec_to_file_map(filespec) @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): ''' Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 diff --git a/nibabel/keywordonly.py b/nibabel/keywordonly.py deleted file mode 100644 index 8cb4908c1e..0000000000 --- a/nibabel/keywordonly.py +++ /dev/null @@ -1,28 +0,0 @@ -""" Decorator for labeling keyword arguments as keyword only -""" - -from functools import wraps - - -def kw_only_func(n): - """ Return function decorator enforcing maximum of `n` positional arguments - """ - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - if len(args) > n: - raise TypeError( - '{0} takes at most {1} positional argument{2}'.format( - func.__name__, n, 's' if n > 1 else '')) - return func(*args, **kwargs) - return wrapper - return decorator - - -def kw_only_meth(n): - """ Return method decorator enforcing maximum of `n` positional arguments - - The method has at least one positional argument ``self`` or ``cls``; allow - for that. 
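The bare ``*`` that replaces this decorator throughout the patch is standard PEP 3102 syntax: every parameter after it is keyword-only, enforced by the interpreter rather than by a wrapper. Comparable behaviour in a standalone sketch (signature borrowed from the patch, body invented)::

    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        return mmap, keep_file_open

    print(from_file_map(object, 'file_map', mmap=False))  # (False, None)
    try:
        from_file_map(object, 'file_map', False)          # positional: rejected
    except TypeError as err:
        print(err)
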
- """ - return kw_only_func(n + 1) diff --git a/nibabel/minc1.py b/nibabel/minc1.py index c41af5c454..a4c1f3c832 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -17,7 +17,6 @@ from .spatialimages import SpatialHeader, SpatialImage from .fileslice import canonical_slicers -from .keywordonly import kw_only_meth from .deprecated import deprecate_with_version _dt_dict = { @@ -340,8 +339,7 @@ class Minc1Image(SpatialImage): ImageArrayProxy = MincImageArrayProxy @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): # Note that mmap and keep_file_open are included for proper with file_map['image'].get_prepare_fileobj() as fobj: minc_file = Minc1File(netcdf_file(fobj)) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index b27d43f77f..a73114081c 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -27,7 +27,6 @@ """ import numpy as np -from .keywordonly import kw_only_meth from ._h5py_compat import h5py from .minc1 import Minc1File, MincHeader, Minc1Image, MincError @@ -158,8 +157,7 @@ class Minc2Image(Minc1Image): header_class = Minc2Header @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): holder = file_map['image'] if holder.filename is None: raise MincError('MINC2 needs filename for load') diff --git a/nibabel/parrec.py b/nibabel/parrec.py index 59eac79809..98ae00587f 100644 --- a/nibabel/parrec.py +++ b/nibabel/parrec.py @@ -130,7 +130,6 @@ from locale import getpreferredencoding from collections import OrderedDict -from .keywordonly import kw_only_meth from .spatialimages import SpatialHeader, SpatialImage from .eulerangles import euler2mat from .volumeutils import Recoder, array_from_file @@ -584,8 +583,7 @@ def exts2pars(exts_source): class PARRECArrayProxy(object): - @kw_only_meth(2) - def __init__(self, file_like, header, mmap=True, scaling='dv'): + def __init__(self, file_like, header, *, mmap=True, scaling='dv'): """ Initialize PARREC array proxy Parameters @@ -1277,8 +1275,7 @@ class PARRECImage(SpatialImage): ImageArrayProxy = PARRECArrayProxy @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, permit_truncated=False, + def from_file_map(klass, file_map, *, mmap=True, permit_truncated=False, scaling='dv', strict_sort=False): """ Create PARREC image from file map `file_map` @@ -1318,8 +1315,7 @@ def from_file_map(klass, file_map, mmap=True, permit_truncated=False, file_map=file_map) @classmethod - @kw_only_meth(1) - def from_filename(klass, filename, mmap=True, permit_truncated=False, + def from_filename(klass, filename, *, mmap=True, permit_truncated=False, scaling='dv', strict_sort=False): """ Create PARREC image from filename `filename` diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index d420bb0c2e..09b857ba32 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -16,7 +16,6 @@ from .batteryrunners import Report from . 
import analyze # module import -from .keywordonly import kw_only_meth from .optpkg import optional_package have_scipy = optional_package('scipy')[1] @@ -244,8 +243,7 @@ class Spm99AnalyzeImage(analyze.AnalyzeImage): rw = have_scipy @classmethod - @kw_only_meth(1) - def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): ''' Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 diff --git a/nibabel/tests/test_keywordonly.py b/nibabel/tests/test_keywordonly.py deleted file mode 100644 index 0ef63d9b13..0000000000 --- a/nibabel/tests/test_keywordonly.py +++ /dev/null @@ -1,43 +0,0 @@ -""" Test kw_only decorators """ - -from ..keywordonly import kw_only_func, kw_only_meth - -from nose.tools import assert_equal -from nose.tools import assert_raises - - -def test_kw_only_func(): - # Test decorator - def func(an_arg): - "My docstring" - return an_arg - assert_equal(func(1), 1) - assert_raises(TypeError, func, 1, 2) - dec_func = kw_only_func(1)(func) - assert_equal(dec_func(1), 1) - assert_raises(TypeError, dec_func, 1, 2) - assert_raises(TypeError, dec_func, 1, akeyarg=3) - assert_equal(dec_func.__doc__, 'My docstring') - - @kw_only_func(1) - def kw_func(an_arg, a_kwarg='thing'): - "Another docstring" - return an_arg, a_kwarg - assert_equal(kw_func(1), (1, 'thing')) - assert_raises(TypeError, kw_func, 1, 2) - assert_equal(kw_func(1, a_kwarg=2), (1, 2)) - assert_raises(TypeError, kw_func, 1, akeyarg=3) - assert_equal(kw_func.__doc__, 'Another docstring') - - class C(object): - - @kw_only_meth(1) - def kw_meth(self, an_arg, a_kwarg='thing'): - "Method docstring" - return an_arg, a_kwarg - c = C() - assert_equal(c.kw_meth(1), (1, 'thing')) - assert_raises(TypeError, c.kw_meth, 1, 2) - assert_equal(c.kw_meth(1, a_kwarg=2), (1, 2)) - assert_raises(TypeError, c.kw_meth, 1, akeyarg=3) - assert_equal(c.kw_meth.__doc__, 'Method docstring') From 540e85e8d84df540436ed6854b4064b31e0e1786 Mon Sep 17 00:00:00 2001 From: Ross Markello Date: Fri, 6 Dec 2019 11:44:36 -0500 Subject: [PATCH 409/689] REF: Undelete keywordonly.py module --- nibabel/keywordonly.py | 28 ++++++++++++++++++++ nibabel/tests/test_keywordonly.py | 43 +++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 nibabel/keywordonly.py create mode 100644 nibabel/tests/test_keywordonly.py diff --git a/nibabel/keywordonly.py b/nibabel/keywordonly.py new file mode 100644 index 0000000000..8cb4908c1e --- /dev/null +++ b/nibabel/keywordonly.py @@ -0,0 +1,28 @@ +""" Decorator for labeling keyword arguments as keyword only +""" + +from functools import wraps + + +def kw_only_func(n): + """ Return function decorator enforcing maximum of `n` positional arguments + """ + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + if len(args) > n: + raise TypeError( + '{0} takes at most {1} positional argument{2}'.format( + func.__name__, n, 's' if n > 1 else '')) + return func(*args, **kwargs) + return wrapper + return decorator + + +def kw_only_meth(n): + """ Return method decorator enforcing maximum of `n` positional arguments + + The method has at least one positional argument ``self`` or ``cls``; allow + for that. 
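As restored, the decorator enforces the limit at call time; its observable behaviour, mirroring the revived test module, looks like::

    from nibabel.keywordonly import kw_only_meth

    class C:
        @kw_only_meth(1)
        def meth(self, an_arg, a_kwarg='thing'):
            return an_arg, a_kwarg

    c = C()
    print(c.meth(1))             # (1, 'thing')
    print(c.meth(1, a_kwarg=2))  # (1, 2)
    try:
        c.meth(1, 2)             # a second positional argument is rejected
    except TypeError as err:
        print(err)               # meth takes at most 2 positional arguments
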
+ """ + return kw_only_func(n + 1) diff --git a/nibabel/tests/test_keywordonly.py b/nibabel/tests/test_keywordonly.py new file mode 100644 index 0000000000..0ef63d9b13 --- /dev/null +++ b/nibabel/tests/test_keywordonly.py @@ -0,0 +1,43 @@ +""" Test kw_only decorators """ + +from ..keywordonly import kw_only_func, kw_only_meth + +from nose.tools import assert_equal +from nose.tools import assert_raises + + +def test_kw_only_func(): + # Test decorator + def func(an_arg): + "My docstring" + return an_arg + assert_equal(func(1), 1) + assert_raises(TypeError, func, 1, 2) + dec_func = kw_only_func(1)(func) + assert_equal(dec_func(1), 1) + assert_raises(TypeError, dec_func, 1, 2) + assert_raises(TypeError, dec_func, 1, akeyarg=3) + assert_equal(dec_func.__doc__, 'My docstring') + + @kw_only_func(1) + def kw_func(an_arg, a_kwarg='thing'): + "Another docstring" + return an_arg, a_kwarg + assert_equal(kw_func(1), (1, 'thing')) + assert_raises(TypeError, kw_func, 1, 2) + assert_equal(kw_func(1, a_kwarg=2), (1, 2)) + assert_raises(TypeError, kw_func, 1, akeyarg=3) + assert_equal(kw_func.__doc__, 'Another docstring') + + class C(object): + + @kw_only_meth(1) + def kw_meth(self, an_arg, a_kwarg='thing'): + "Method docstring" + return an_arg, a_kwarg + c = C() + assert_equal(c.kw_meth(1), (1, 'thing')) + assert_raises(TypeError, c.kw_meth, 1, 2) + assert_equal(c.kw_meth(1, a_kwarg=2), (1, 2)) + assert_raises(TypeError, c.kw_meth, 1, akeyarg=3) + assert_equal(c.kw_meth.__doc__, 'Method docstring') From e69ec32d45556d79deb503d4ebc1446fef93c979 Mon Sep 17 00:00:00 2001 From: Ross Markello Date: Fri, 6 Dec 2019 11:59:19 -0500 Subject: [PATCH 410/689] REF: Add keywordonly module to deprecation cycle Marked for removal in version 5.0.0 --- nibabel/keywordonly.py | 7 +++++++ nibabel/tests/test_removalschedule.py | 1 + 2 files changed, 8 insertions(+) diff --git a/nibabel/keywordonly.py b/nibabel/keywordonly.py index 8cb4908c1e..198e70f2c9 100644 --- a/nibabel/keywordonly.py +++ b/nibabel/keywordonly.py @@ -2,6 +2,13 @@ """ from functools import wraps +import warnings + +warnings.warn("We will remove this module from nibabel 5.0. " + "Please use the built-in Python `*` argument to ensure " + "keyword-only parameters (see PEP 3102).", + DeprecationWarning, + stacklevel=2) def kw_only_func(n): diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index ce1ba668b2..a0c3484a3a 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -2,6 +2,7 @@ from ..testing import assert_raises, assert_false MODULE_SCHEDULE = [ + ('5.0.0', ['nibabel.keywordonly']), ('4.0.0', ['nibabel.trackvis']), ('3.0.0', ['nibabel.minc', 'nibabel.checkwarns']), # Verify that the test will be quiet if the schedule outlives the modules From d8aef1eec7e37cd73b25ca55790d8fc18c9956b3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 09:39:54 -0500 Subject: [PATCH 411/689] DOC: Update reviewers in changelog --- Changelog | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Changelog b/Changelog index 165f1db94b..40cdc5d781 100644 --- a/Changelog +++ b/Changelog @@ -36,7 +36,8 @@ New features output type. Scale factors (slope, intercept) are applied, but may be cast to narrower types, to control memory usage. This is now the basis of ``img.get_fdata()``, which will scale data in single precision if - the output type is ``float32``. (pr/844) (CM, reviewed by ...) + the output type is ``float32``. 
(pr/844) (CM, reviewed by Alejandro + de la Vega, Ross Markello) * GiftiImage method ``agg_data()`` to return usable data arrays (pr/793) (Hao-Ting Wang, reviewed by CM) * Accept ``os.PathLike`` objects in place of filenames (pr/610) (Cameron From d8cf1a968b5f2185b2de42c4787f5540e7b15093 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 09:48:11 -0500 Subject: [PATCH 412/689] DOC: Update changelog with recent PRs --- Changelog | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Changelog b/Changelog index 40cdc5d781..a3b65a71fe 100644 --- a/Changelog +++ b/Changelog @@ -47,6 +47,9 @@ New features Enhancements ------------ +* Improve testing of data scaling in ArrayProxy API (pr/847) (CM, reviewed + by Alejandro de la Vega) +* Document ``SpatialImage.slicer`` interface (pr/846) (CM) * ``get_fdata(dtype=np.float32)`` will attempt to avoid casting data to ``np.float64`` when scaling parameters would otherwise promote the data type unnecessarily. (pr/833) (CM, reviewed by Ross Markello) @@ -68,6 +71,7 @@ Bug fixes Maintenance ----------- +* Remove replicated metadata for packaged data from MANIFEST.in (pr/845) (CM) * Support Python >=3.5.1, including Python 3.8.0 (pr/787) (CM) * Manage versioning with slightly customized Versioneer (pr/786) (CM) * Reference Nipy Community Code and Nibabel Developer Guidelines in From 65e4453773a7bab5a2bb3a6beb40178cc6bee491 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 08:26:54 -0500 Subject: [PATCH 413/689] MNT: Remove support for keep_file_open=="auto" --- nibabel/analyze.py | 2 +- nibabel/arrayproxy.py | 24 ++++++++---------------- nibabel/brikhead.py | 4 ++-- nibabel/dataobj_images.py | 4 ++-- nibabel/freesurfer/mghformat.py | 2 +- nibabel/spm99analyze.py | 2 +- nibabel/tests/test_arrayproxy.py | 16 +++------------- 7 files changed, 18 insertions(+), 36 deletions(-) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index dc352505c6..476f26c3a7 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -939,7 +939,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index c4189e61e8..9ff1fa3b6a 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -48,15 +48,13 @@ If this flag is set to ``True``, a single file handle is created and used. If ``False``, a new file handle is created every time the image is accessed. -If this flag is set to ``'auto'``, a ``DeprecationWarning`` will be raised, which -will become a ``ValueError`` in nibabel 3.0.0. If this is set to any other value, attempts to create an ``ArrayProxy`` without specifying the ``keep_file_open`` flag will result in a ``ValueError`` being raised. .. warning:: Setting this flag to a value of ``'auto'`` became deprecated - behaviour in version 2.4.1. Support for ``'auto'`` will be removed + behaviour in version 2.4.1. Support for ``'auto'`` was removed in version 3.0.0. """ KEEP_FILE_OPEN_DEFAULT = False @@ -102,7 +100,7 @@ def __init__(self, file_like, spec, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. 
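Condensing the behaviour these docstring updates describe: the ``keep_file_open`` flag now accepts only ``True``, ``False`` or ``None`` (the module default), and anything else raises ``ValueError``. A rough standalone sketch of the decision logic::

    def should_keep_file_open(file_like, keep_file_open, have_indexed_gzip):
        if keep_file_open is None:
            keep_file_open = False           # KEEP_FILE_OPEN_DEFAULT
        if keep_file_open not in (True, False):
            raise ValueError('keep_file_open must be one of {None, True, False}')
        if hasattr(file_like, 'read') and hasattr(file_like, 'seek'):
            return False, False              # an open handle makes the flag moot
        have_igzip = have_indexed_gzip and file_like.endswith('.gz')
        return keep_file_open, keep_file_open or have_igzip

    print(should_keep_file_open('img.nii.gz', None, True))  # (False, True)
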
Parameters ---------- @@ -239,14 +237,14 @@ def _should_keep_file_open(self, file_like, keep_file_open): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will be removed in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- file_like : object File-like object or filename, as passed to ``__init__``. - keep_file_open : { 'auto', True, False } + keep_file_open : { True, False } Flag as passed to ``__init__``. Returns @@ -259,23 +257,17 @@ def _should_keep_file_open(self, file_like, keep_file_open): """ if keep_file_open is None: keep_file_open = KEEP_FILE_OPEN_DEFAULT - if keep_file_open == 'auto': - warnings.warn("Setting nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT to 'auto' is " - "deprecated and will become an error in v3.0.", DeprecationWarning) - if keep_file_open == 'auto': - warnings.warn("A value of 'auto' for keep_file_open is deprecated and will become an " - "error in v3.0. You probably want False.", DeprecationWarning) + if keep_file_open not in (True, False): + raise ValueError("nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT must be boolean. " + "Found: {}".format(keep_file_open)) elif keep_file_open not in (True, False): - raise ValueError('keep_file_open should be one of {None, True, False}') + raise ValueError('keep_file_open must be one of {None, True, False}') # file_like is a handle - keep_file_open is irrelevant if hasattr(file_like, 'read') and hasattr(file_like, 'seek'): return False, False # if the file is a gzip file, and we have_indexed_gzip, have_igzip = openers.HAVE_INDEXED_GZIP and file_like.endswith('.gz') - # XXX Remove in v3.0 - if keep_file_open == 'auto': - return have_igzip, have_igzip persist_opener = keep_file_open or have_igzip return keep_file_open, persist_opener diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 7ef1386872..41bfc54c4d 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -227,7 +227,7 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- @@ -511,7 +511,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 4b4d1b55d8..a761d62b77 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -421,7 +421,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- @@ -459,7 +459,7 @@ def from_filename(klass, filename, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. 
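        For orientation, a minimal usage sketch of this classmethod (the
        file name is hypothetical, and ``Nifti1Image`` stands in for any
        ``DataobjImage`` subclass):

            import numpy as np
            from nibabel import Nifti1Image

            img = Nifti1Image.from_filename('example.nii', mmap='c',
                                            keep_file_open=False)
            data = img.get_fdata(dtype=np.float32)  # scale in single precision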
Parameters ---------- diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 6eb0f156e9..75c6de9781 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -543,7 +543,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index d420bb0c2e..e7aaa77048 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -250,7 +250,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has - been deprecated. It will raise an error in nibabel 3.0. + been deprecated. It raises an error as of nibabel 3.0. Parameters ---------- diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index b1cc081b6d..f9fa718cec 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -372,8 +372,6 @@ def test_keep_file_open_true_false_invalid(): # False | True | True | True # True | False | True | n/a # True | True | True | False - # 'auto' | False | False | n/a - # 'auto' | True | True | False # # Each test tuple contains: # - file type - gzipped ('gz') or not ('bin'), or an open file handle @@ -388,26 +386,18 @@ def test_keep_file_open_true_false_invalid(): ('open', False, True, False, False), ('open', True, False, False, False), ('open', True, True, False, False), - ('open', 'auto', False, False, False), - ('open', 'auto', True, False, False), # non-gzip file - have_igzip is irrelevant, decision should be made # solely from kfo flag ('bin', False, False, False, False), ('bin', False, True, False, False), ('bin', True, False, True, True), ('bin', True, True, True, True), - ('bin', 'auto', False, False, False), - ('bin', 'auto', True, False, False), - # gzip file. If igzip is present, we persist the ImageOpener. If kfo - # is 'auto': - # - if igzip is present, kfo -> True - # - otherwise, kfo -> False + # gzip file. If igzip is present, we persist the ImageOpener. 
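    # As a summary of the rule the rows below encode (matching
    # _should_keep_file_open above): for filename inputs, keep_file_open is
    # returned unchanged, and the opener is persisted when keep_file_open is
    # True or the file is gzipped and indexed_gzip is available; open file
    # handles always yield (False, False).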
('gz', False, False, False, False), ('gz', False, True, True, False), ('gz', True, False, True, True), ('gz', True, True, True, True), - ('gz', 'auto', False, False, False), - ('gz', 'auto', True, True, True)] + ] dtype = np.float32 data = np.arange(1000, dtype=dtype).reshape((10, 10, 10)) @@ -480,7 +470,7 @@ def test_keep_file_open_true_false_invalid(): with assert_raises(ValueError): ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open=55) with assert_raises(ValueError): - ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open='autob') + ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open='auto') with assert_raises(ValueError): ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open='cauto') From ceb404908ac92963cf17d1ac888459865a332fb5 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 08:27:21 -0500 Subject: [PATCH 414/689] MNT: Remove minc.py --- nibabel/__init__.py | 4 +--- nibabel/minc.py | 10 ---------- nibabel/tests/test_minc1.py | 20 -------------------- 3 files changed, 1 insertion(+), 33 deletions(-) delete mode 100644 nibabel/minc.py diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 3e57643fc1..ff8911a8c2 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -78,9 +78,6 @@ def teardown_package(): from .minc2 import Minc2Image from .cifti2 import Cifti2Header, Cifti2Image from .gifti import GiftiImage -# Deprecated backwards compatiblity for MINC1 -from .deprecated import ModuleProxy as _ModuleProxy -minc = _ModuleProxy('nibabel.minc') from .minc1 import MincImage from .freesurfer import MGHImage from .funcs import (squeeze_image, concat_images, four_to_three, @@ -89,6 +86,7 @@ def teardown_package(): flip_axis, OrientationError, apply_orientation, aff2axcodes) from .imageclasses import class_map, ext_map, all_image_classes +from .deprecated import ModuleProxy as _ModuleProxy trackvis = _ModuleProxy('nibabel.trackvis') from . import mriutils from . import streamlines diff --git a/nibabel/minc.py b/nibabel/minc.py deleted file mode 100644 index 09523bdc36..0000000000 --- a/nibabel/minc.py +++ /dev/null @@ -1,10 +0,0 @@ -""" Deprecated MINC1 module """ - -import warnings - -warnings.warn("We will remove this module from nibabel 3.0; " - "Please use the 'minc1' module instead", - DeprecationWarning, - stacklevel=2) - -from .minc1 import * # noqa diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index a4d42fdc36..c5d92119ae 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -104,30 +104,10 @@ def test_old_namespace(): # Check warnings raised arr = np.arange(24).reshape((2, 3, 4)) aff = np.diag([2, 3, 4, 1]) - with clear_and_catch_warnings() as warns: - warnings.simplefilter('always', DeprecationWarning) - # Top level import. - # This import does not trigger an import of the minc.py module, because - # it's the proxy object. - from .. import minc - assert_equal(warns, []) - # If there was a previous import it will be module, otherwise it will be - # a proxy - previous_import = isinstance(minc, types.ModuleType) - if not previous_import: - assert_true(isinstance(minc, ModuleProxy)) - old_minc1image = minc.Minc1Image # just to check it works - # There may or may not be a warning raised on accessing the proxy, - # depending on whether the minc.py module is already imported in this - # test run. - if not previous_import: - assert_equal(warns.pop(0).category, DeprecationWarning) with clear_and_catch_warnings() as warns: from .. 
import Minc1Image, MincImage assert_equal(warns, []) - # The import from old module is the same as that from new - assert_true(old_minc1image is Minc1Image) # But the old named import, imported from new, is not the same assert_false(Minc1Image is MincImage) assert_equal(warns, []) From 8fbeba20a4784d7a34cbca3261d92a5e106eaad6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 08:44:26 -0500 Subject: [PATCH 415/689] DOC: Use nifti1 to demonstrate ModuleProxy instead of the ephemeral minc --- nibabel/deprecated.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/deprecated.py b/nibabel/deprecated.py index 715ee7f60d..1a0f85330d 100644 --- a/nibabel/deprecated.py +++ b/nibabel/deprecated.py @@ -20,10 +20,10 @@ class ModuleProxy(object): :: arr = np.arange(24).reshape((2, 3, 4)) - minc = ModuleProxy('nibabel.minc') - minc_image = minc.Minc1Image(arr, np.eye(4)) + nifti1 = ModuleProxy('nibabel.nifti1') + nifti1_image = nifti1.Nifti1Image(arr, np.eye(4)) - So, the ``minc`` object is a proxy that will import the required module + So, the ``nifti1`` object is a proxy that will import the required module when you do attribute access and return the attributes of the imported module. """ From f6d9992b8530a7bb2b8ccfc40b931a2410ede995 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 09:22:02 -0500 Subject: [PATCH 416/689] MNT: Remove checkwarns --- nibabel/checkwarns.py | 31 ------------------------------- nibabel/tests/test_checkwarns.py | 11 ----------- 2 files changed, 42 deletions(-) delete mode 100644 nibabel/checkwarns.py delete mode 100644 nibabel/tests/test_checkwarns.py diff --git a/nibabel/checkwarns.py b/nibabel/checkwarns.py deleted file mode 100644 index a5942427b6..0000000000 --- a/nibabel/checkwarns.py +++ /dev/null @@ -1,31 +0,0 @@ -# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# -# See COPYING file distributed along with the NiBabel package for the -# copyright and license terms. -# -### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Contexts for *with* statement allowing checks for warnings -''' - -import warnings - -from .testing import (error_warnings, suppress_warnings) -from .deprecated import deprecate_with_version - - -warnings.warn('The checkwarns module is deprecated and will be removed ' - 'in nibabel v3.0', DeprecationWarning) - - -@deprecate_with_version('ErrorWarnings is deprecated; use nibabel.testing.error_warnings.', - since='2.1.0', until='3.0.0') -class ErrorWarnings(error_warnings): - pass - - -@deprecate_with_version('IgnoreWarnings is deprecated; use nibabel.testing.suppress_warnings.', - since='2.1.0', until='3.0.0') -class IgnoreWarnings(suppress_warnings): - pass diff --git a/nibabel/tests/test_checkwarns.py b/nibabel/tests/test_checkwarns.py deleted file mode 100644 index b1e6483273..0000000000 --- a/nibabel/tests/test_checkwarns.py +++ /dev/null @@ -1,11 +0,0 @@ -""" Tests for warnings context managers -""" -from ..testing import assert_equal, assert_warns, suppress_warnings - - -def test_ignore_and_error_warnings(): - with suppress_warnings(): - from .. 
import checkwarns - - assert_warns(DeprecationWarning, checkwarns.IgnoreWarnings) - assert_warns(DeprecationWarning, checkwarns.ErrorWarnings) From 9758d55d7fbd3e598a1485f14073abee92baa23b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 16:38:12 -0500 Subject: [PATCH 417/689] TEST: Test invalid KEEP_FILE_OPEN_DEFAULT values --- nibabel/tests/test_arrayproxy.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index f9fa718cec..af7d5b73e6 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -467,12 +467,14 @@ def test_keep_file_open_true_false_invalid(): fname = 'testdata' with open(fname, 'wb') as fobj: fobj.write(data.tostring(order='F')) - with assert_raises(ValueError): - ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open=55) - with assert_raises(ValueError): - ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open='auto') - with assert_raises(ValueError): - ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open='cauto') + + for invalid_kfo in (55, 'auto', 'cauto'): + with assert_raises(ValueError): + ArrayProxy(fname, ((10, 10, 10), dtype), + keep_file_open=invalid_kfo) + with patch_keep_file_open_default(invalid_kfo): + with assert_raises(ValueError): + ArrayProxy(fname, ((10, 10, 10), dtype)) def test_pickle_lock(): From 64f996169377c0e3addab5e0746989af6439ae7c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 11 Dec 2019 17:58:28 -0500 Subject: [PATCH 418/689] STY: Remove unused import [skip ci] --- nibabel/arrayproxy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 9ff1fa3b6a..b508e651bd 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -27,7 +27,6 @@ """ from contextlib import contextmanager from threading import RLock -import warnings import numpy as np From c05c499aa74b32f23fa7bf5aaa609e799755e3cc Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 11 Dec 2019 16:31:41 -0500 Subject: [PATCH 419/689] TEST: Move from mock to unittest.mock --- dev-requirements.txt | 1 - doc-requirements.txt | 1 - doc/source/installation.rst | 1 - doc/source/links_names.txt | 1 - nibabel/__init__.py | 16 ++++------------ nibabel/benchmarks/bench_arrayproxy_slicing.py | 2 +- nibabel/cmdline/tests/test_parrec2nii.py | 2 +- nibabel/tests/test_arrayproxy.py | 2 +- nibabel/tests/test_openers.py | 2 +- nibabel/tests/test_optpkg.py | 2 +- setup.cfg | 3 --- 11 files changed, 9 insertions(+), 24 deletions(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index f63af96cf4..659ab6cada 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,4 +1,3 @@ # Requirements for running tests -r requirements.txt nose -mock diff --git a/doc-requirements.txt b/doc-requirements.txt index 59c87a4e6f..ccb1b574a9 100644 --- a/doc-requirements.txt +++ b/doc-requirements.txt @@ -4,4 +4,3 @@ sphinx numpydoc texext matplotlib>=1.3 -mock diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 2dab695e80..b578cdc695 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -89,7 +89,6 @@ Requirements * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) * nose_ 0.11 or greater (optional, to run the tests) -* mock_ (optional, to run the tests) * sphinx_ (optional, to build the documentation) Get the development sources diff --git a/doc/source/links_names.txt b/doc/source/links_names.txt index 4b37ef9a4d..3370bc9d58 100644 --- a/doc/source/links_names.txt +++ b/doc/source/links_names.txt @@ -83,7 +83,6 @@ .. _emacs_python_mode: http://www.emacswiki.org/cgi-bin/wiki/PythonMode .. _doctest-mode: http://ed.loper.org/projects/doctestmode/ .. _nose: http://somethingaboutorange.com/mrl/projects/nose -.. _mock: https://github.com/testing-cabal/mock .. _`python coverage tester`: http://nedbatchelder.com/code/coverage/ .. _bitbucket: https://bitbucket.org .. _six: http://pythonhosted.org/six diff --git a/nibabel/__init__.py b/nibabel/__init__.py index ff8911a8c2..2d3428289c 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -92,18 +92,10 @@ def teardown_package(): from . import streamlines from . 
import viewers -import pkgutil - -if not pkgutil.find_loader('mock'): - def test(*args, **kwargs): - raise RuntimeError('Need "mock" package for tests') -else: - from numpy.testing import Tester - test = Tester().test - bench = Tester().bench - del Tester - -del pkgutil +from numpy.testing import Tester +test = Tester().test +bench = Tester().bench +del Tester from .pkg_info import get_pkg_info as _get_pkg_info diff --git a/nibabel/benchmarks/bench_arrayproxy_slicing.py b/nibabel/benchmarks/bench_arrayproxy_slicing.py index c880aa0700..7fe79763d0 100644 --- a/nibabel/benchmarks/bench_arrayproxy_slicing.py +++ b/nibabel/benchmarks/bench_arrayproxy_slicing.py @@ -18,7 +18,7 @@ import gc import itertools as it import numpy as np -import mock +from unittest import mock import nibabel as nib from nibabel.tmpdirs import InTemporaryDirectory diff --git a/nibabel/cmdline/tests/test_parrec2nii.py b/nibabel/cmdline/tests/test_parrec2nii.py index c5b5831270..9fb556e34d 100644 --- a/nibabel/cmdline/tests/test_parrec2nii.py +++ b/nibabel/cmdline/tests/test_parrec2nii.py @@ -8,7 +8,7 @@ import nibabel from nibabel.cmdline import parrec2nii -from mock import Mock, MagicMock, patch +from unittest.mock import Mock, MagicMock, patch from nose.tools import assert_true from numpy.testing import (assert_almost_equal, assert_array_equal) diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index af7d5b73e6..15e69a1829 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -23,7 +23,7 @@ from ..openers import ImageOpener from ..nifti1 import Nifti1Header -import mock +from unittest import mock from numpy.testing import assert_array_equal, assert_array_almost_equal from nose.tools import (assert_true, assert_false, assert_equal, diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index 69704eaeb1..6d1baab734 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -18,7 +18,7 @@ from ..tmpdirs import InTemporaryDirectory from ..volumeutils import BinOpener -import mock +from unittest import mock from nose.tools import (assert_true, assert_false, assert_equal, assert_not_equal, assert_raises) from ..testing import error_warnings diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 99f90b5de6..17c0816e70 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -1,7 +1,7 @@ """ Testing optpkg module """ -import mock +from unittest import mock import types import sys import builtins diff --git a/setup.cfg b/setup.cfg index 069d93d007..b008ef79de 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,7 +34,6 @@ install_requires = tests_require = nose >=0.11 pytest - mock test_suite = nose.collector zip_safe = False packages = find: @@ -47,7 +46,6 @@ dev = twine doc = matplotlib >= 1.3.1 - mock numpydoc sphinx >=0.3 texext @@ -55,7 +53,6 @@ style = flake8 test = coverage - mock nose >=0.11 pytest all = From 4289dbe68f77a42d01d4a9e94d867c12b6ecd6da Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 11 Dec 2019 19:46:40 -0500 Subject: [PATCH 420/689] MNT/DOC: Add h5py to notes, installation extras --- doc/source/installation.rst | 1 + doc/source/links_names.txt | 1 + setup.cfg | 3 +++ 3 files changed, 5 insertions(+) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index b578cdc695..b097a04048 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -86,6 +86,7 @@ Requirements * Python_ 3.5.1 or greater * NumPy_ 1.12 or greater * SciPy_ (optional, for full SPM-ANALYZE support) +* h5py_ (optional, for MINC2 support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) * nose_ 0.11 or greater (optional, to run the tests) diff --git a/doc/source/links_names.txt b/doc/source/links_names.txt index 3370bc9d58..7dc24e2cba 100644 --- a/doc/source/links_names.txt +++ b/doc/source/links_names.txt @@ -111,6 +111,7 @@ .. _twine: https://pypi.python.org/pypi/twine .. _datapkg: https://pythonhosted.org/datapkg/ .. _python imaging library: https://pypi.python.org/pypi/Pillow +.. _h5py: https://www.h5py.org/ .. Python imaging projects .. _PyMVPA: http://www.pymvpa.org diff --git a/setup.cfg b/setup.cfg index b008ef79de..8478cc37db 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,6 +49,8 @@ doc = numpydoc sphinx >=0.3 texext +minc2 = + h5py style = flake8 test = @@ -59,6 +61,7 @@ all = %(dicom)s %(dev)s %(doc)s + %(minc2)s %(style)s %(test)s From d204a4c07005b7bd6201929340c3593dd28a53cc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 11 Dec 2019 19:54:55 -0500 Subject: [PATCH 421/689] DOC: Update changelog --- Changelog | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Changelog b/Changelog index a3b65a71fe..0d1ec85d09 100644 --- a/Changelog +++ b/Changelog @@ -79,6 +79,10 @@ Maintenance API changes and deprecations ---------------------------- +* Fully remove deprecated ``checkwarns`` and ``minc`` modules. (pr/852) (CM) +* The ``keep_file_open`` argument to file load operations and ``ArrayProxy``s + no longer acccepts the value ``"auto"``, raising a ``ValueError``. (pr/852) + (CM) * Deprecate ``ArraySequence.data`` in favor of ``ArraySequence.get_data()``, which will return a copy. ``ArraySequence.data`` now returns a read-only view. (pr/811) (MC, reviewed by Serge Koudoro, Philippe Poulin, CM, MB) From a3b10746531136e271c24e399d2bc87a7f7707f0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 11 Dec 2019 19:56:05 -0500 Subject: [PATCH 422/689] REL: 3.0.0rc2 --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index 5953ee2d8f..6af408609f 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -13,7 +13,7 @@ _version_major = 3 _version_minor = 0 _version_micro = 0 -_version_extra = 'rc1.post.dev' +_version_extra = 'rc2' # _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From d1518aa71a8a80f5e7049a3509dfb49cf6b78005 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 11 Dec 2019 21:24:01 -0500 Subject: [PATCH 423/689] MNT: Version 3.0.0rc2.post.dev --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index 6af408609f..8231b1db0c 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -13,7 +13,7 @@ _version_major = 3 _version_minor = 0 _version_micro = 0 -_version_extra = 'rc2' +_version_extra = 'rc2.post.dev' # _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From c1b92dbdeb649f8aa4a59a18b49a471a8d6d99b9 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 18 Dec 2019 09:46:02 -0500 Subject: [PATCH 424/689] REL: 3.0 --- Changelog | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Changelog b/Changelog index 0d1ec85d09..de60b8c36d 100644 --- a/Changelog +++ b/Changelog @@ -25,8 +25,8 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. -3.0.0 (To Be Determined) -======================== +3.0.0 (Wednesday 18 December 2019) +================================== New features ------------ From 9397cb4b33d6e0c455571b662a94b41740ee14b0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 18 Dec 2019 09:59:48 -0500 Subject: [PATCH 425/689] MNT: Begin development on 3.1 --- nibabel/info.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index 8231b1db0c..bafc3f6adb 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -11,10 +11,9 @@ # This should be set to the intended next version + dev to indicate a # development (pre-release) version. _version_major = 3 -_version_minor = 0 +_version_minor = 1 _version_micro = 0 -_version_extra = 'rc2.post.dev' -# _version_extra = '' +_version_extra = 'dev' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" VERSION = "%s.%s.%s%s" % (_version_major, From 683e234ec932b69463e987528645028c82a213a4 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 18 Dec 2019 10:02:27 -0500 Subject: [PATCH 426/689] MNT: 3.0.x maintenance branch --- nibabel/info.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index 8231b1db0c..eb2e1fa6d0 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -12,9 +12,8 @@ # development (pre-release) version. _version_major = 3 _version_minor = 0 -_version_micro = 0 -_version_extra = 'rc2.post.dev' -# _version_extra = '' +_version_micro = 1 +_version_extra = 'dev' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" VERSION = "%s.%s.%s%s" % (_version_major, From 5d3e817e2d675235f46792f072c625f320c9d440 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 18 Dec 2019 10:02:27 -0500 Subject: [PATCH 427/689] MNT: 3.0.x maintenance branch --- nibabel/info.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/info.py b/nibabel/info.py index 8231b1db0c..eb2e1fa6d0 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -12,9 +12,8 @@ # development (pre-release) version. _version_major = 3 _version_minor = 0 -_version_micro = 0 -_version_extra = 'rc2.post.dev' -# _version_extra = '' +_version_micro = 1 +_version_extra = 'dev' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" VERSION = "%s.%s.%s%s" % (_version_major, From 3c00929082507b30b251541173663d5c481d24ee Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 7 Jan 2020 10:09:14 -0500 Subject: [PATCH 428/689] FIX: Validate ExpiredDeprecationErrors --- nibabel/tests/test_arrayproxy.py | 7 --- nibabel/tests/test_image_api.py | 7 +-- nibabel/tests/test_minc1.py | 26 +++------ nibabel/tests/test_proxy_api.py | 9 +-- nibabel/tests/test_scaling.py | 81 ++------------------------ nibabel/tests/test_spatialimages.py | 33 ++++------- nibabel/tests/test_volumeutils.py | 89 ++++++++++++++++++++--------- nibabel/volumeutils.py | 40 ++++++------- 8 files changed, 115 insertions(+), 177 deletions(-) diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index 15e69a1829..527f9b8f91 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -112,9 +112,6 @@ def test_tuplespec(): ): assert_array_equal(getattr(ap_header, method)(*args), getattr(ap_tuple, method)(*args)) - # Tuple-defined ArrayProxies have no header to store - with warnings.catch_warnings(): - assert_true(ap_tuple.header is None) # Partial tuples of length 2-4 are also valid for n in range(2, 5): ArrayProxy(bio, tuple_spec[:n]) @@ -141,10 +138,6 @@ def test_nifti1_init(): ap = ArrayProxy(bio, hdr) assert_true(ap.file_like == bio) assert_equal(ap.shape, shape) - # Check there has been a copy of the header - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - assert_false(ap.header is hdr) # Get the data assert_array_equal(np.asarray(ap), arr * 2.0 + 10) with InTemporaryDirectory(): diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 10b61628c7..f91b61af9e 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -47,6 +47,7 @@ from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose from ..testing import clear_and_catch_warnings from ..tmpdirs import InTemporaryDirectory +from ..deprecator import ExpiredDeprecationError from .test_api_validators import ValidateAPI from .test_helpers import (bytesio_round_trip, bytesio_filemap, @@ -422,10 +423,8 @@ def validate_ndim(self, imaker, params): def validate_shape_deprecated(self, imaker, params): # Check deprecated get_shape API img = imaker() - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) - assert_equal(img.get_shape(), params['shape']) - assert_equal(len(w), 1) + with assert_raises(ExpiredDeprecationError): + img.get_shape() def validate_mmap_parameter(self, imaker, params): img = imaker() diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index c5d92119ae..eb9ff15cab 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -26,6 +26,7 @@ from ..tmpdirs import InTemporaryDirectory from ..testing import (assert_true, assert_equal, assert_false, assert_raises, assert_warns, assert_array_equal, data_path, clear_and_catch_warnings) +from ..deprecator import ExpiredDeprecationError from . import test_spatialimages as tsi from .test_fileslice import slicer_samples @@ -105,25 +106,16 @@ def test_old_namespace(): arr = np.arange(24).reshape((2, 3, 4)) aff = np.diag([2, 3, 4, 1]) - with clear_and_catch_warnings() as warns: - from .. 
import Minc1Image, MincImage - assert_equal(warns, []) - # But the old named import, imported from new, is not the same - assert_false(Minc1Image is MincImage) - assert_equal(warns, []) - # Create object using old name - mimg = MincImage(arr, aff) - # Call to create object created warning - assert_equal(warns.pop(0).category, FutureWarning) - assert_array_equal(mimg.get_fdata(), arr) - # Another old name - from ..minc1 import MincFile, Minc1File - assert_false(MincFile is Minc1File) + from .. import Minc1Image, MincImage + assert_false(Minc1Image is MincImage) + with assert_raises(ExpiredDeprecationError): + MincImage(arr, aff) assert_equal(warns, []) + # Another old name + from ..minc1 import MincFile, Minc1File + assert_false(MincFile is Minc1File) + with assert_raises(ExpiredDeprecationError): mf = MincFile(netcdf_file(EG_FNAME)) - # Call to create object created warning - assert_equal(warns.pop(0).category, FutureWarning) - assert_equal(mf.get_data_shape(), (10, 20, 20)) class _TestMincFile(object): diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 48a024795d..d7ca111f22 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -58,6 +58,7 @@ from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose from ..testing import data_path as DATA_PATH, assert_dt_equal, clear_and_catch_warnings +from ..deprecator import ExpiredDeprecationError from ..tmpdirs import InTemporaryDirectory @@ -324,12 +325,8 @@ def validate_slope_inter_offset(self, pmaker, params): def validate_deprecated_header(self, pmaker, params): prox, fio, hdr = pmaker() - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - # Header is a copy of original - assert_false(prox.header is hdr) - assert_equal(prox.header, hdr) - assert_equal(warns.pop(0).category, DeprecationWarning) + with assert_raises(ExpiredDeprecationError): + prox.header class TestSpm99AnalyzeProxyAPI(TestAnalyzeProxyAPI): diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index 019cc58d1c..cd66bbfe3a 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -11,11 +11,12 @@ import numpy as np from io import BytesIO -from ..volumeutils import (calculate_scale, scale_min_max, finite_range, - apply_read_scaling, array_to_file, array_from_file) +from ..volumeutils import finite_range, apply_read_scaling, array_to_file, array_from_file from ..casting import type_info from ..testing import suppress_warnings +from .test_volumeutils import _calculate_scale + from numpy.testing import (assert_array_almost_equal, assert_array_equal) from nose.tools import (assert_true, assert_equal, assert_raises, @@ -26,56 +27,6 @@ DEBUG = True -def test_scale_min_max(): - mx_dt = np.maximum_sctype(np.float) - for tp in np.sctypes['uint'] + np.sctypes['int']: - info = np.iinfo(tp) - # Need to pump up to max fp type to contain python longs - imin = np.array(info.min, dtype=mx_dt) - imax = np.array(info.max, dtype=mx_dt) - value_pairs = ( - (0, imax), - (imin, 0), - (imin, imax), - (1, 10), - (-1, -1), - (1, 1), - (-10, -1), - (-100, 10)) - for mn, mx in value_pairs: - # with intercept - scale, inter = scale_min_max(mn, mx, tp, True) - if mx - mn: - assert_array_almost_equal, (mx - inter) / scale, imax - assert_array_almost_equal, (mn - inter) / scale, imin - else: - assert_equal, (scale, inter), (1.0, mn) - # without intercept - if imin == 0 and mn < 0 and mx > 0: - (assert_raises, ValueError, - scale_min_max, mn, 
mx, tp, False) - continue - scale, inter = scale_min_max(mn, mx, tp, False) - assert_equal, inter, 0.0 - if mn == 0 and mx == 0: - assert_equal, scale, 1.0 - continue - sc_mn = mn / scale - sc_mx = mx / scale - assert_true, sc_mn >= imin - assert_true, sc_mx <= imax - if imin == 0: - if mx > 0: # numbers all +ve - assert_array_almost_equal, mx / scale, imax - else: # numbers all -ve - assert_array_almost_equal, mn / scale, imax - continue - if abs(mx) >= abs(mn): - assert_array_almost_equal, mx / scale, imax - else: - assert_array_almost_equal, mn / scale, imin - - def test_finite_range(): # Finite range utility function for in_arr, res in ( @@ -122,26 +73,6 @@ def test_finite_range(): assert_raises(TypeError, finite_range, a) -def test_calculate_scale(): - # Test for special cases in scale calculation - npa = np.array - # Here the offset handles it - res = calculate_scale(npa([-2, -1], dtype=np.int8), np.uint8, True) - assert_equal(res, (1.0, -2.0, None, None)) - # Not having offset not a problem obviously - res = calculate_scale(npa([-2, -1], dtype=np.int8), np.uint8, 0) - assert_equal(res, (-1.0, 0.0, None, None)) - # Case where offset handles scaling - res = calculate_scale(npa([-1, 1], dtype=np.int8), np.uint8, 1) - assert_equal(res, (1.0, -1.0, None, None)) - # Can't work for no offset case - assert_raises(ValueError, - calculate_scale, npa([-1, 1], dtype=np.int8), np.uint8, 0) - # Offset trick can't work when max is out of range - res = calculate_scale(npa([-1, 255], dtype=np.int16), np.uint8, 1) - assert_not_equal(res, (1.0, -1.0, None, None)) - - def test_a2f_mn_mx(): # Test array to file mn, mx handling str_io = BytesIO() @@ -213,9 +144,9 @@ def test_array_file_scales(): info = type_info(in_type) arr[0], arr[1] = info['min'], info['max'] if not err is None: - assert_raises(err, calculate_scale, arr, out_dtype, True) + assert_raises(err, _calculate_scale, arr, out_dtype, True) continue - slope, inter, mn, mx = calculate_scale(arr, out_dtype, True) + slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True) array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx) bio.seek(0) arr2 = array_from_file(arr.shape, out_dtype, bio) @@ -266,7 +197,7 @@ def check_int_a2f(in_type, out_type): data[1] = this_max + 0j str_io = BytesIO() try: - scale, inter, mn, mx = calculate_scale(data, out_type, True) + scale, inter, mn, mx = _calculate_scale(data, out_type, True) except ValueError as e: if DEBUG: print(in_type, out_type, e) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 7d275e3366..8b11e5cc51 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -28,6 +28,7 @@ from ..testing import (clear_and_catch_warnings, suppress_warnings, memmap_after_ufunc) from ..tmpdirs import InTemporaryDirectory +from ..deprecator import ExpiredDeprecationError from .. 
import load as top_load @@ -284,8 +285,8 @@ def test_data_shape(self): img = img_klass(arr, np.eye(4)) # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_equal(img.get_shape()[:1], (4,)) - assert_equal(np.prod(img.get_shape()), 4) + assert_equal(img.shape[:1], (4,)) + assert_equal(np.prod(img.shape), 4) img = img_klass(np.zeros((2, 3, 4), dtype=np.float32), np.eye(4)) assert_equal(img.shape, (2, 3, 4)) @@ -305,19 +306,13 @@ def test_str(self): assert_true(len(str(img)) > 0) def test_get_shape(self): - # Check there is a get_shape method - # (it is deprecated) + # Check that get_shape raises an ExpiredDeprecationError img_klass = self.image_class # Assumes all possible images support int16 # See https://github.com/nipy/nibabel/issues/58 img = img_klass(np.arange(1, dtype=np.int16), np.eye(4)) - with suppress_warnings(): - # Shape may be promoted to higher dimension, but may not reorder or - # change size - assert_equal(img.get_shape()[:1], (1,)) - assert_equal(np.prod(img.get_shape()), 1) - img = img_klass(np.zeros((2, 3, 4), np.int16), np.eye(4)) - assert_equal(img.get_shape(), (2, 3, 4)) + with assert_raises(ExpiredDeprecationError): + img.get_shape() def test_get_fdata(self): # Test array image and proxy image interface for floating point data @@ -568,18 +563,14 @@ def from_file_map(self, file_map=None): bio = BytesIO() file_map = FakeImage.make_file_map({'image': bio}) - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) + with assert_raises(ExpiredDeprecationError): img.to_files(file_map) - assert_equal(len(w), 1) + with assert_raises(ExpiredDeprecationError): img.to_filespec('an_image') - assert_equal(len(w), 2) - img = FakeImage.from_files(file_map) - assert_equal(len(w), 3) - file_map = FakeImage.filespec_to_files('an_image') - assert_equal(list(file_map), ['image']) - assert_equal(file_map['image'].filename, 'an_image.foo') - assert_equal(len(w), 4) + with assert_raises(ExpiredDeprecationError): + FakeImage.from_files(file_map) + with assert_raises(ExpiredDeprecationError): + FakeImage.filespec_to_files('an_image') class MmapImageMixin(object): diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 4072f85131..d391abf359 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -32,8 +32,9 @@ array_to_file, allopen, # for backwards compatibility fname_ext_ul_case, - calculate_scale, - can_cast, + calculate_scale, # Deprecated + can_cast, # Deprecated + scale_min_max, # Deprecated write_zeros, seek_tell, apply_read_scaling, @@ -52,8 +53,11 @@ from ..openers import Opener, BZ2File from ..casting import (floor_log2, type_info, OK_FLOATS, shared_range) +from ..deprecator import ExpiredDeprecationError + from numpy.testing import (assert_array_almost_equal, assert_array_equal) +from nose.tools import assert_raises from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, suppress_warnings, clear_and_catch_warnings) @@ -67,6 +71,15 @@ NUMERIC_TYPES = CFLOAT_TYPES + IUINT_TYPES +def test_deprecated_functions(): + with assert_raises(ExpiredDeprecationError): + scale_min_max(0, 1, np.uint8, True) + with assert_raises(ExpiredDeprecationError): + calculate_scale(np.array([-2, -1], dtype=np.int8), np.uint8, True) + with assert_raises(ExpiredDeprecationError): + can_cast(np.float32, np.float32) + + def test__is_compressed_fobj(): # _is_compressed helper function with InTemporaryDirectory(): @@ -294,9 +307,7 @@ def test_array_to_file(): 
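    # Scaling parameters in the updated test come from _calculate_scale, a
    # test-local copy of the deprecated volumeutils.calculate_scale helper
    # defined at the bottom of this file.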
for code in '<>': ndt = dt.newbyteorder(code) for allow_intercept in (True, False): - with suppress_warnings(): # deprecated - scale, intercept, mn, mx = \ - calculate_scale(arr, ndt, allow_intercept) + scale, intercept, mn, mx = _calculate_scale(arr, ndt, allow_intercept) data_back = write_return(arr, str_io, ndt, 0, intercept, scale) assert_array_almost_equal(arr, data_back) @@ -875,28 +886,6 @@ def test_best_write_scale_ftype(): assert best_write_scale_ftype(arr, lower_t(0.5), 0) == lower_t -def test_can_cast(): - tests = ((np.float32, np.float32, True, True, True), - (np.float64, np.float32, True, True, True), - (np.complex128, np.float32, False, False, False), - (np.float32, np.complex128, True, True, True), - (np.float32, np.uint8, False, True, True), - (np.uint32, np.complex128, True, True, True), - (np.int64, np.float32, True, True, True), - (np.complex128, np.int16, False, False, False), - (np.float32, np.int16, False, True, True), - (np.uint8, np.int16, True, True, True), - (np.uint16, np.int16, False, True, True), - (np.int16, np.uint16, False, False, True), - (np.int8, np.uint16, False, False, True), - (np.uint16, np.uint8, False, True, True), - ) - for intype, outtype, def_res, scale_res, all_res in tests: - assert def_res == can_cast(intype, outtype) - assert scale_res == can_cast(intype, outtype, False, True) - assert all_res == can_cast(intype, outtype, True, True) - - def test_write_zeros(): bio = BytesIO() write_zeros(bio, 10000) @@ -1291,3 +1280,49 @@ def run(self): if err: raise err[0] + + +def _calculate_scale(data, out_dtype, allow_intercept): + ''' Calculate scaling and optional intercept for data + + Copy of the deprecated volumeutils.calculate_scale, to preserve tests + + Parameters + ---------- + data : array + out_dtype : dtype + output data type in some form understood by ``np.dtype`` + allow_intercept : bool + If True allow non-zero intercept + + Returns + ------- + scaling : None or float + scalefactor to divide into data. None if no valid data + intercept : None or float + intercept to subtract from data. 
None if no valid data + mn : None or float + minimum of finite value in data or None if this will not + be used to threshold data + mx : None or float + minimum of finite value in data, or None if this will not + be used to threshold data + ''' + # Code here is a compatibility shell around arraywriters refactor + in_dtype = data.dtype + out_dtype = np.dtype(out_dtype) + if np.can_cast(in_dtype, out_dtype): + return 1.0, 0.0, None, None + from ..arraywriters import make_array_writer, WriterError, get_slope_inter + try: + writer = make_array_writer(data, out_dtype, True, allow_intercept) + except WriterError as e: + raise ValueError(str(e)) + if out_dtype.kind in 'fc': + return (1.0, 0.0, None, None) + mn, mx = writer.finite_range() + if (mn, mx) == (np.inf, -np.inf): # No valid data + return (None, None, None, None) + if in_dtype.kind not in 'fc': + mn, mx = (None, None) + return get_slope_inter(writer) + (mn, mx) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 2cc083ecb6..41d248a671 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -400,17 +400,17 @@ def can_cast(in_type, out_type, has_intercept=False, has_slope=False): Examples -------- - >>> can_cast(np.float64, np.float32) + >>> can_cast(np.float64, np.float32) # doctest: +SKIP True - >>> can_cast(np.complex128, np.float32) + >>> can_cast(np.complex128, np.float32) # doctest: +SKIP False - >>> can_cast(np.int64, np.float32) + >>> can_cast(np.int64, np.float32) # doctest: +SKIP True - >>> can_cast(np.float32, np.int16) + >>> can_cast(np.float32, np.int16) # doctest: +SKIP False - >>> can_cast(np.float32, np.int16, False, True) + >>> can_cast(np.float32, np.int16, False, True) # doctest: +SKIP True - >>> can_cast(np.int16, np.uint8) + >>> can_cast(np.int16, np.uint8) # doctest: +SKIP False Whether we can actually cast int to uint when we don't have an intercept @@ -420,9 +420,9 @@ def can_cast(in_type, out_type, has_intercept=False, has_slope=False): Here we need an intercept to scale the full range of an int to a uint - >>> can_cast(np.int16, np.uint8, False, True) + >>> can_cast(np.int16, np.uint8, False, True) # doctest: +SKIP False - >>> can_cast(np.int16, np.uint8, True, True) + >>> can_cast(np.int16, np.uint8, True, True) # doctest: +SKIP True ''' in_dtype = np.dtype(in_type) @@ -1094,26 +1094,26 @@ def scale_min_max(mn, mx, out_type, allow_intercept): Examples -------- - >>> scale_min_max(0, 255, np.uint8, False) + >>> scale_min_max(0, 255, np.uint8, False) # doctest: +SKIP (1.0, 0.0) - >>> scale_min_max(-128, 127, np.int8, False) + >>> scale_min_max(-128, 127, np.int8, False) # doctest: +SKIP (1.0, 0.0) - >>> scale_min_max(0, 127, np.int8, False) + >>> scale_min_max(0, 127, np.int8, False) # doctest: +SKIP (1.0, 0.0) - >>> scaling, intercept = scale_min_max(0, 127, np.int8, True) - >>> np.allclose((0 - intercept) / scaling, -128) + >>> scaling, intercept = scale_min_max(0, 127, np.int8, True) # doctest: +SKIP + >>> np.allclose((0 - intercept) / scaling, -128) # doctest: +SKIP True - >>> np.allclose((127 - intercept) / scaling, 127) + >>> np.allclose((127 - intercept) / scaling, 127) # doctest: +SKIP True - >>> scaling, intercept = scale_min_max(-10, -1, np.int8, True) - >>> np.allclose((-10 - intercept) / scaling, -128) + >>> scaling, intercept = scale_min_max(-10, -1, np.int8, True) # doctest: +SKIP + >>> np.allclose((-10 - intercept) / scaling, -128) # doctest: +SKIP True - >>> np.allclose((-1 - intercept) / scaling, 127) + >>> np.allclose((-1 - intercept) / scaling, 127) # doctest: +SKIP 
True - >>> scaling, intercept = scale_min_max(1, 10, np.int8, True) - >>> np.allclose((1 - intercept) / scaling, -128) + >>> scaling, intercept = scale_min_max(1, 10, np.int8, True) # doctest: +SKIP + >>> np.allclose((1 - intercept) / scaling, -128) # doctest: +SKIP True - >>> np.allclose((10 - intercept) / scaling, 127) + >>> np.allclose((10 - intercept) / scaling, 127) # doctest: +SKIP True Notes From 0ad47fe2869d0bcbcce017c5e79501b764eadc21 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 7 Jan 2020 11:10:46 -0500 Subject: [PATCH 429/689] FIX: Interpret "ver+extra" as post-release, not pre-release --- nibabel/pkg_info.py | 4 ++++ nibabel/tests/test_pkg_info.py | 2 ++ 2 files changed, 6 insertions(+) diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index 9b88bc764b..c8bff3941d 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -55,6 +55,10 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): pkg_version, pkg_extra = _parse_version(pkg_version_str) if version != pkg_version: return _cmp(StrictVersion(version), StrictVersion(pkg_version)) + if extra.startswith('+') or pkg_extra.startswith('+'): + return (1 if pkg_extra == '' + else -1 if extra == '' + else _cmp(extra, pkg_extra)) return (0 if extra == pkg_extra else 1 if extra == '' else -1 if pkg_extra == '' diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index 1a5775d33f..2b2ad788da 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -63,6 +63,8 @@ def test_cmp_pkg_version(): ('1.2.1rc', '1.2.1rc1', -1), ('1.2.1b', '1.2.1a', 1), ('1.2.1a', '1.2.1b', -1), + ('1.2.1+1', '1.2.1', 1), + ('1.2.1', '1.2.1+1', -1), ): assert_equal(cmp_pkg_version(test_ver, pkg_ver), exp_out) assert_raises(ValueError, cmp_pkg_version, 'foo.2') From fadfb451509ee567f691059b50503d3c225a9e83 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 7 Jan 2020 11:17:52 -0500 Subject: [PATCH 430/689] TEST: Numpy changed longdouble str representations in 1.18 --- nibabel/tests/test_h5py_compat.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_h5py_compat.py b/nibabel/tests/test_h5py_compat.py index 26d70b6e55..af5c50989c 100644 --- a/nibabel/tests/test_h5py_compat.py +++ b/nibabel/tests/test_h5py_compat.py @@ -40,5 +40,8 @@ def test_disabled_h5py_cases(): # Verify that the root cause is present # If any tests fail, they will likely be these, so they may be # ill-advised... - assert_equal(str(np.longdouble), str(np.float64)) + if LooseVersion(np.__version__) < '1.18': + assert_equal(str(np.longdouble), str(np.float64)) + else: + assert_not_equal(str(np.longdouble), str(np.float64)) assert_not_equal(np.longdouble, np.float64) From cabfc81078d9eebbe375312dbaad9f049c031351 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 7 Jan 2020 12:31:25 -0500 Subject: [PATCH 431/689] FIX: Expand version comparison logic --- nibabel/pkg_info.py | 52 +++++++++++++++++++++++++--------- nibabel/tests/test_pkg_info.py | 6 ++++ 2 files changed, 44 insertions(+), 14 deletions(-) diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index c8bff3941d..b4ff68e91d 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -22,14 +22,19 @@ def _cmp(a, b): def cmp_pkg_version(version_str, pkg_version_str=__version__): - """ Compare `version_str` to current package version + """ Compare ``version_str`` to current package version To be valid, a version must have a numerical major version followed by a dot, followed by a numerical minor version. It may optionally be followed by a dot and a numerical micro version, and / or by an "extra" string. - *Any* extra string labels the version as pre-release, so `1.2.0somestring` - compares as prior to (pre-release for) `1.2.0`, where `somestring` can be - any string. + The extra string may further contain a "+". Any value to the left of a "+" + labels the version as pre-release, while values to the right indicate a + post-release relative to the values to the left. That is, + ``1.2.0+1`` is post-release for ``1.2.0``, while ``1.2.0rc1+1`` is + post-release for ``1.2.0rc1`` and pre-release for ``1.2.0``. + + This is an approximation of `PEP-440`_, and future versions will fully + implement PEP-440. Parameters ---------- @@ -50,19 +55,38 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): 1 >>> cmp_pkg_version('1.2.0dev', '1.2.0') -1 + >>> cmp_pkg_version('1.2.0dev', '1.2.0rc1') + -1 + >>> cmp_pkg_version('1.2.0rc1', '1.2.0') + -1 + >>> cmp_pkg_version('1.2.0rc1+1', '1.2.0rc1') + 1 + >>> cmp_pkg_version('1.2.0rc1+1', '1.2.0') + -1 + + .. 
_`PEP-440`: https://www.python.org/dev/peps/pep-0440/ """ version, extra = _parse_version(version_str) pkg_version, pkg_extra = _parse_version(pkg_version_str) - if version != pkg_version: - return _cmp(StrictVersion(version), StrictVersion(pkg_version)) - if extra.startswith('+') or pkg_extra.startswith('+'): - return (1 if pkg_extra == '' - else -1 if extra == '' - else _cmp(extra, pkg_extra)) - return (0 if extra == pkg_extra - else 1 if extra == '' - else -1 if pkg_extra == '' - else _cmp(extra, pkg_extra)) + + # Normalize versions + quick_check = _cmp(StrictVersion(version), StrictVersion(pkg_version)) + # Nothing further to check + if quick_check != 0 or extra == pkg_extra == '': + return quick_check + + # Before + is pre-release, after + is additional increment + pre, _, post = extra.partition('+') + pkg_pre, _, pkg_post = pkg_extra.partition('+') + quick_check = _cmp(pre, pkg_pre) + if quick_check != 0: # Excludes case where pre and pkg_pre == '' + # Pre-releases are ordered but strictly less than non-pre + return (1 if pre == '' + else -1 if pkg_pre == '' + else quick_check) + + # All else being equal, compare additional information lexically + return _cmp(post, pkg_post) def pkg_commit_hash(pkg_path=None): diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index 2b2ad788da..1cbf5f9096 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -63,8 +63,14 @@ def test_cmp_pkg_version(): ('1.2.1rc', '1.2.1rc1', -1), ('1.2.1b', '1.2.1a', 1), ('1.2.1a', '1.2.1b', -1), + ('1.2.0+1', '1.2', 1), + ('1.2', '1.2.0+1', -1), ('1.2.1+1', '1.2.1', 1), ('1.2.1', '1.2.1+1', -1), + ('1.2.1rc1+1', '1.2.1', -1), + ('1.2.1', '1.2.1rc1+1', 1), + ('1.2.1rc1+1', '1.2.1+1', -1), + ('1.2.1+1', '1.2.1rc1+1', 1), ): assert_equal(cmp_pkg_version(test_ver, pkg_ver), exp_out) assert_raises(ValueError, cmp_pkg_version, 'foo.2') From 50abcb3cd292d2977afca732bcefeee0c7c3184a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 7 Jan 2020 11:17:52 -0500 Subject: [PATCH 432/689] TEST: Numpy changed longdouble str representations in 1.18 --- nibabel/tests/test_h5py_compat.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_h5py_compat.py b/nibabel/tests/test_h5py_compat.py index 26d70b6e55..af5c50989c 100644 --- a/nibabel/tests/test_h5py_compat.py +++ b/nibabel/tests/test_h5py_compat.py @@ -40,5 +40,8 @@ def test_disabled_h5py_cases(): # Verify that the root cause is present # If any tests fail, they will likely be these, so they may be # ill-advised... - assert_equal(str(np.longdouble), str(np.float64)) + if LooseVersion(np.__version__) < '1.18': + assert_equal(str(np.longdouble), str(np.float64)) + else: + assert_not_equal(str(np.longdouble), str(np.float64)) assert_not_equal(np.longdouble, np.float64) From 53893649b206412cabdb079ebc813c69e717e59a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 7 Jan 2020 07:32:32 -0500 Subject: [PATCH 433/689] RF: Improve fallback version check, require PyPA packaging module --- nibabel/pkg_info.py | 49 ++++++++-------------------------- nibabel/tests/test_pkg_info.py | 18 +++++++++---- setup.cfg | 1 + 3 files changed, 25 insertions(+), 43 deletions(-) diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index b4ff68e91d..7be15315d2 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -1,20 +1,10 @@ import sys -import re -from distutils.version import StrictVersion +from packaging.version import Version from . 
import _version __version__ = _version.get_versions()['version'] -def _parse_version(version_str): - """ Parse version string `version_str` in our format - """ - match = re.match(r'([0-9.]*\d)(.*)', version_str) - if match is None: - raise ValueError('Invalid version ' + version_str) - return match.groups() - - def _cmp(a, b): """ Implementation of ``cmp`` for Python 3 """ @@ -24,18 +14,19 @@ def _cmp(a, b): def cmp_pkg_version(version_str, pkg_version_str=__version__): """ Compare ``version_str`` to current package version - To be valid, a version must have a numerical major version followed by a - dot, followed by a numerical minor version. It may optionally be followed - by a dot and a numerical micro version, and / or by an "extra" string. + This comparator follows `PEP-440`_ conventions for determining version + ordering. + + To be valid, a version must have a numerical major version. It may be + optionally followed by a dot and a numerical minor version, which may, + in turn, optionally be followed by a dot and a numerical micro version, + and / or by an "extra" string. The extra string may further contain a "+". Any value to the left of a "+" labels the version as pre-release, while values to the right indicate a post-release relative to the values to the left. That is, ``1.2.0+1`` is post-release for ``1.2.0``, while ``1.2.0rc1+1`` is post-release for ``1.2.0rc1`` and pre-release for ``1.2.0``. - This is an approximation of `PEP-440`_, and future versions will fully - implement PEP-440. - Parameters ---------- version_str : str @@ -63,30 +54,12 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): 1 >>> cmp_pkg_version('1.2.0rc1+1', '1.2.0') -1 + >>> cmp_pkg_version('1.2.0.post1', '1.2.0') + 1 .. _`PEP-440`: https://www.python.org/dev/peps/pep-0440/ """ - version, extra = _parse_version(version_str) - pkg_version, pkg_extra = _parse_version(pkg_version_str) - - # Normalize versions - quick_check = _cmp(StrictVersion(version), StrictVersion(pkg_version)) - # Nothing further to check - if quick_check != 0 or extra == pkg_extra == '': - return quick_check - - # Before + is pre-release, after + is additional increment - pre, _, post = extra.partition('+') - pkg_pre, _, pkg_post = pkg_extra.partition('+') - quick_check = _cmp(pre, pkg_pre) - if quick_check != 0: # Excludes case where pre and pkg_pre == '' - # Pre-releases are ordered but strictly less than non-pre - return (1 if pre == '' - else -1 if pkg_pre == '' - else quick_check) - - # All else being equal, compare additional information lexically - return _cmp(post, pkg_post) + return _cmp(Version(version_str), Version(pkg_version_str)) def pkg_commit_hash(pkg_path=None): diff --git a/nibabel/tests/test_pkg_info.py b/nibabel/tests/test_pkg_info.py index 1cbf5f9096..4f71d0e1d0 100644 --- a/nibabel/tests/test_pkg_info.py +++ b/nibabel/tests/test_pkg_info.py @@ -1,6 +1,8 @@ """ Testing package info """ +from packaging.version import Version + import nibabel as nib from nibabel.pkg_info import cmp_pkg_version from ..info import VERSION @@ -30,12 +32,14 @@ def test_fallback_version(): This should only fail if we fail to bump nibabel.info.VERSION immediately after release """ + ver = Version(nib.__version__) + fallback = Version(VERSION) assert ( + # Releases have no local information, archive matches versioneer + ver.local is None or # dev version should be larger than tag+commit-githash - cmp_pkg_version(VERSION) >= 0 or - # Allow VERSION bump to lag releases by one commit - VERSION == nib.__version__ + 'dev'), \ - 
"nibabel.info.VERSION does not match current tag information" + fallback >= ver), \ + "nibabel.info.VERSION does not match latest tag information" def test_cmp_pkg_version(): @@ -76,6 +80,10 @@ def test_cmp_pkg_version(): assert_raises(ValueError, cmp_pkg_version, 'foo.2') assert_raises(ValueError, cmp_pkg_version, 'foo.2', '1.0') assert_raises(ValueError, cmp_pkg_version, '1.0', 'foo.2') - assert_raises(ValueError, cmp_pkg_version, '1') assert_raises(ValueError, cmp_pkg_version, 'foo') + # Check dev/RC sequence + seq = ('3.0.0dev', '3.0.0rc1', '3.0.0rc1.post.dev', '3.0.0rc2', '3.0.0rc2.post.dev', '3.0.0') + for stage1, stage2 in zip(seq[:-1], seq[1:]): + assert_equal(cmp_pkg_version(stage1, stage2), -1) + assert_equal(cmp_pkg_version(stage2, stage1), 1) diff --git a/setup.cfg b/setup.cfg index 8478cc37db..748cd8d990 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,6 +31,7 @@ provides = python_requires = >=3.5.1 install_requires = numpy >=1.12 + packaging tests_require = nose >=0.11 pytest From af56a0932b7d3eec84bf2e34c3214c1d525fa6f4 Mon Sep 17 00:00:00 2001 From: Ben Darwin Date: Fri, 10 Jan 2020 21:23:57 -0500 Subject: [PATCH 434/689] test_proxy_api: replace nonexistent `extend` method call on a tuple with `+=` --- nibabel/tests/test_proxy_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index d7ca111f22..3ab4ad965f 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -345,7 +345,7 @@ class TestNifti1ProxyAPI(TestSpm99AnalyzeProxyAPI): data_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.complex64, np.float64, np.int8, np.uint16, np.uint32, np.int64, np.uint64, np.complex128) if have_binary128(): - data_dtypes.extend(np.float128, np.complex256) + data_dtypes += (np.float128, np.complex256) class TestMGHAPI(TestAnalyzeProxyAPI): From 17c9dc2ffba9003b8e48bbee23408dc4529f308b Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 23 Jan 2020 05:14:12 -0500 Subject: [PATCH 435/689] MNT: All supported numpy have float16 --- nibabel/casting.py | 8 +------- nibabel/tests/test_floating.py | 12 +----------- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/nibabel/casting.py b/nibabel/casting.py index 89be788da5..8406824dbe 100644 --- a/nibabel/casting.py +++ b/nibabel/casting.py @@ -171,12 +171,6 @@ def shared_range(flt_type, int_type): # types. 
# ---------------------------------------------------------------------------- -try: - _float16 = np.float16 -except AttributeError: # float16 not present in np < 1.6 - _float16 = None - - class FloatingError(Exception): pass @@ -242,7 +236,7 @@ def type_info(np_type): minexp=info.minexp, maxexp=info.maxexp, width=width) - if np_type in (_float16, np.float32, np.float64, + if np_type in (np.float16, np.float32, np.float64, np.complex64, np.complex128): return ret info_64 = np.finfo(np.float64) diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index d9401db156..0c4b5a8cb3 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -15,15 +15,7 @@ from nose import SkipTest from nose.tools import assert_equal, assert_raises, assert_true, assert_false -IEEE_floats = [np.float32, np.float64] -try: - np.float16 -except AttributeError: # float16 not present in np < 1.6 - have_float16 = False -else: - have_float16 = True -if have_float16: - IEEE_floats.append(np.float16) +IEEE_floats = [np.float16, np.float32, np.float64] LD_INFO = type_info(np.longdouble) @@ -201,8 +193,6 @@ def test_as_int_np_fix(): def test_floor_exact_16(): # A normal integer can generate an inf in float16 - if not have_float16: - raise SkipTest('No float16') assert_equal(floor_exact(2**31, np.float16), np.inf) assert_equal(floor_exact(-2**31, np.float16), -np.inf) From c01a9d2b2137f761ca809e5422c0079febe017d0 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 26 Jan 2020 11:52:06 -0500 Subject: [PATCH 436/689] MNT: Add Ben Darwin to Zenodo --- .zenodo.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.zenodo.json b/.zenodo.json index 54cbe2f28b..2c2949d862 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -259,6 +259,9 @@ { "name": "Schwartz, Yannick" }, + { + "name": "Darwin, Ben" + }, { "affiliation": "INRIA", "name": "Thirion, Bertrand", From c54672bb731fcc550fa56c7dbb8f9bbfb93eaf9a Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 26 Jan 2020 11:52:22 -0500 Subject: [PATCH 437/689] DOC: Update changelog, copyright --- Changelog | 16 ++++++++++++++++ doc/source/conf.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/Changelog b/Changelog index de60b8c36d..204d0ddb66 100644 --- a/Changelog +++ b/Changelog @@ -25,6 +25,22 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard. References like "pr/298" refer to github pull request numbers. +3.0.1 (Monday 27 January 2020) +============================== + +Bug fixes +--------- +* Test failed by using array method on tuple. (pr/860) (Ben Darwin, reviewed by + CM) +* Validate ``ExpiredDeprecationError``\s, promoted by 3.0 release from + ``DeprecationWarning``\s. (pr/857) (CM) + +Maintenance +----------- +* Remove logic accommodating numpy without float16 types. (pr/866) (CM) +* Accommodate new numpy dtype strings. (pr/858) (CM) + + 3.0.0 (Wednesday 18 December 2019) ================================== diff --git a/doc/source/conf.py b/doc/source/conf.py index cdc773e35a..8225f67e8a 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -99,7 +99,7 @@ # General information about the project. 
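The float16 guards deleted in the patch above date from numpy < 1.6; every numpy release nibabel still supports ships np.float16 unconditionally. The float16 properties that test_floor_exact_16 keeps exercising can be checked directly; a quick sketch, independent of nibabel's casting helpers:

import numpy as np

# float16 has a 10-bit significand and a 5-bit exponent, so the largest
# finite value is 65504 and anything much beyond overflows to inf.
info = np.finfo(np.float16)
assert info.nmant == 10 and info.nexp == 5
assert info.max == 65504.0
# This is why floor_exact(2**31, np.float16) is expected to return inf.
assert np.float16(2.0 ** 31) == np.inf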
project = u'NiBabel' -copyright = u'2006-2019, %(maintainer)s <%(author_email)s>' % metadata +copyright = u'2006-2020, %(maintainer)s <%(author_email)s>' % metadata # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From 9d5f8da2640158d3e1988d52c96f523f15cc4adf Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 26 Jan 2020 12:18:34 -0500 Subject: [PATCH 438/689] MNT: Update requirements --- doc-requirements.txt | 2 +- doc/source/installation.rst | 2 +- doc/source/links_names.txt | 1 + setup.cfg | 8 +++++++- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/doc-requirements.txt b/doc-requirements.txt index ccb1b574a9..2036a0d6fb 100644 --- a/doc-requirements.txt +++ b/doc-requirements.txt @@ -3,4 +3,4 @@ sphinx numpydoc texext -matplotlib>=1.3 +matplotlib >=1.3.1 diff --git a/doc/source/installation.rst b/doc/source/installation.rst index b097a04048..a7d6507995 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -89,7 +89,7 @@ Requirements * h5py_ (optional, for MINC2 support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) -* nose_ 0.11 or greater (optional, to run the tests) +* nose_ 0.11 or greater and pytest_ (optional, to run the tests) * sphinx_ (optional, to build the documentation) Get the development sources diff --git a/doc/source/links_names.txt b/doc/source/links_names.txt index 7dc24e2cba..8a936dc72e 100644 --- a/doc/source/links_names.txt +++ b/doc/source/links_names.txt @@ -83,6 +83,7 @@ .. _emacs_python_mode: http://www.emacswiki.org/cgi-bin/wiki/PythonMode .. _doctest-mode: http://ed.loper.org/projects/doctestmode/ .. _nose: http://somethingaboutorange.com/mrl/projects/nose +.. _pytest: https://docs.pytest.org/ .. _`python coverage tester`: http://nedbatchelder.com/code/coverage/ .. _bitbucket: https://bitbucket.org .. _six: http://pythonhosted.org/six diff --git a/setup.cfg b/setup.cfg index 8478cc37db..5543e13d5b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,6 +41,9 @@ packages = find: [options.extras_require] dicom = pydicom >=0.9.9 +dicomfs = + %(dicom)s + pillow dev = gitpython twine @@ -51,6 +54,8 @@ doc = texext minc2 = h5py +spm = + scipy style = flake8 test = @@ -58,10 +63,11 @@ test = nose >=0.11 pytest all = - %(dicom)s + %(dicomfs)s %(dev)s %(doc)s %(minc2)s + %(spm)s %(style)s %(test)s From cf430b099f2d60dfb1aeab41e8eddf1dffbd23db Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Sun, 26 Jan 2020 12:18:51 -0500 Subject: [PATCH 439/689] DOC: Update author list --- doc/source/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/index.rst b/doc/source/index.rst index 8b469631dd..6c851a52bc 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -104,6 +104,7 @@ contributed code and discussion (in rough order of appearance): * Hao-Ting Wang * Dorota Jarecka * Chris Gorgolewski +* Ben Darwin License reprise =============== From 251298a878248ea614d5fd938565058e2e8f4817 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Sun, 26 Jan 2020 12:37:52 -0500 Subject: [PATCH 440/689] DOC: Add missing entry from 3.0.0 changelog --- Changelog | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Changelog b/Changelog index 204d0ddb66..c79a23f895 100644 --- a/Changelog +++ b/Changelog @@ -82,6 +82,8 @@ Bug fixes * Sliced ``Tractogram``s no longer ``apply_affine`` to the original ``Tractogram``'s streamlines. (pr/811) (MC, reviewed by Serge Koudoro, Philippe Poulin, CM, MB) +* Change strings with invalid escapes to raw strings (pr/827) (EL, reviewed + by CM) * Re-import externals/netcdf.py from scipy to resolve numpy deprecation (pr/821) (CM) From b6df4c6350195f68331486f6d28a120cedcd93dc Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 26 Jan 2020 16:09:22 -0500 Subject: [PATCH 441/689] MNT: Update Ben Darwin name/affiliation --- .mailmap | 1 + .zenodo.json | 3 ++- doc/source/index.rst | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.mailmap b/.mailmap index 9eaa0d1434..9701ddf503 100644 --- a/.mailmap +++ b/.mailmap @@ -13,6 +13,7 @@ B. Nolan Nichols Nolan Nichols bpinsard Basile Pinsard bpinsard Ben Cipollini Ben Cipollini +Benjamin C Darwin Bertrand Thirion bthirion Cameron Riddell <31414128+CRiddler@users.noreply.github.com> Christian Haselgrove Christian Haselgrove diff --git a/.zenodo.json b/.zenodo.json index 2c2949d862..e81655fe8f 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -260,7 +260,8 @@ "name": "Schwartz, Yannick" }, { - "name": "Darwin, Ben" + "affiliation": "Hospital for Sick Children", + "name": "Darwin, Benjamin C" }, { "affiliation": "INRIA", diff --git a/doc/source/index.rst b/doc/source/index.rst index 6c851a52bc..82c9606ef5 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -104,7 +104,7 @@ contributed code and discussion (in rough order of appearance): * Hao-Ting Wang * Dorota Jarecka * Chris Gorgolewski -* Ben Darwin +* Benjamin C Darwin License reprise =============== From 864fd5eeba26f5013106b488c23c15b732c1ddab Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 27 Jan 2020 09:05:59 -0500 Subject: [PATCH 442/689] MNT: 3.0.2-dev --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index eb2e1fa6d0..d4ebde9960 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -12,7 +12,7 @@ # development (pre-release) version. _version_major = 3 _version_minor = 0 -_version_micro = 1 +_version_micro = 2 _version_extra = 'dev' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From 0dbd8a0a013bf7718b6a45494dcbf2b62a8e6a1c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 27 Jan 2020 12:05:37 -0500 Subject: [PATCH 443/689] NEP29: Bump minimum numpy to 1.13 --- doc/source/installation.rst | 2 +- min-requirements.txt | 3 ++- requirements.txt | 3 ++- setup.cfg | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index a7d6507995..c7be36c658 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -84,7 +84,7 @@ Requirements .. 
check these against setup.cfg * Python_ 3.5.1 or greater -* NumPy_ 1.12 or greater +* NumPy_ 1.13 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * h5py_ (optional, for MINC2 support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) diff --git a/min-requirements.txt b/min-requirements.txt index ed4ed75bf2..febcba7885 100644 --- a/min-requirements.txt +++ b/min-requirements.txt @@ -1,2 +1,3 @@ # Auto-generated by tools/update_requirements.py -numpy ==1.12 +numpy ==1.13 +packaging diff --git a/requirements.txt b/requirements.txt index 365f19556b..0cac05bb05 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ # Auto-generated by tools/update_requirements.py -numpy >=1.12 +numpy >=1.13 +packaging diff --git a/setup.cfg b/setup.cfg index 0ae5619db2..9dee1a88a8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -30,7 +30,7 @@ provides = [options] python_requires = >=3.5.1 install_requires = - numpy >=1.12 + numpy >=1.13 packaging tests_require = nose >=0.11 From d388beddd83299c53ede9e6d4d23e3762b45493e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 27 Jan 2020 16:14:18 -0500 Subject: [PATCH 444/689] MNT: Minimum packaging of 14.3 --- doc/source/installation.rst | 1 + doc/source/links_names.txt | 1 + min-requirements.txt | 2 +- requirements.txt | 2 +- setup.cfg | 2 +- 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/source/installation.rst b/doc/source/installation.rst index c7be36c658..ed390578ff 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -85,6 +85,7 @@ Requirements * Python_ 3.5.1 or greater * NumPy_ 1.13 or greater +* Packaging_ 14.3 or greater * SciPy_ (optional, for full SPM-ANALYZE support) * h5py_ (optional, for MINC2 support) * PyDICOM_ 0.9.9 or greater (optional, for DICOM support) diff --git a/doc/source/links_names.txt b/doc/source/links_names.txt index 8a936dc72e..1a1b688cd4 100644 --- a/doc/source/links_names.txt +++ b/doc/source/links_names.txt @@ -113,6 +113,7 @@ .. _datapkg: https://pythonhosted.org/datapkg/ .. _python imaging library: https://pypi.python.org/pypi/Pillow .. _h5py: https://www.h5py.org/ +.. _packaging: https://packaging.pypa.io .. Python imaging projects .. 
_PyMVPA: http://www.pymvpa.org
diff --git a/min-requirements.txt b/min-requirements.txt index febcba7885..0d749072bf 100644 --- a/min-requirements.txt +++ b/min-requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py numpy ==1.13 -packaging +packaging ==14.3
diff --git a/requirements.txt b/requirements.txt index 0cac05bb05..3134ffd33c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ # Auto-generated by tools/update_requirements.py numpy >=1.13 -packaging +packaging >=14.3
diff --git a/setup.cfg b/setup.cfg index 9dee1a88a8..d425dd2371 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ provides = python_requires = >=3.5.1 install_requires = numpy >=1.13 - packaging + packaging >=14.3 tests_require = nose >=0.11 pytest
From 5f8d68daaded798361d8a1e951da80de3f2e4a9b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Mon, 11 Nov 2019 18:04:19 -0500 Subject: [PATCH 445/689] Convert tests in tests/test_a*.py and test_wrapstruct to pytest (for some reason we can't run the tests in one pytest command, so an extra line was added to travis for now) --- .travis.yml | 2 + nibabel/tests/test_analyze.py | 289 ++++++++++++++------------- nibabel/tests/test_api_validators.py | 7 +- nibabel/tests/test_arrayproxy.py | 96 +++++---- nibabel/tests/test_arraywriters.py | 235 ++++++++++++---------- nibabel/tests/test_wrapstruct.py | 178 +++++++++-------- 6 files changed, 430 insertions(+), 377 deletions(-)
diff --git a/.travis.yml b/.travis.yml index 81d3589769..def7ca2369 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,6 +130,8 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + pytest -v ../nibabel/tests/test_a*.py + pytest -v ../nibabel/tests/test_w*.py else false fi
diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 6b05df83e3..b0cc566979 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -31,14 +31,10 @@ from ..tmpdirs import InTemporaryDirectory from ..arraywriters import WriterError -from nose.tools import (assert_equal, assert_not_equal, assert_true, - assert_false, assert_raises) - +import pytest from numpy.testing import (assert_array_equal, assert_array_almost_equal) -from ..testing import (assert_equal, assert_not_equal, assert_true, - assert_false, assert_raises, data_path, - suppress_warnings, assert_dt_equal) +from ..testing_pytest import (data_path, suppress_warnings, assert_dt_equal) from .test_wrapstruct import _TestLabeledWrapStruct from . import test_spatialimages as tsi @@ -71,7 +67,7 @@ class TestAnalyzeHeader(_TestLabeledWrapStruct): def test_supported_types(self): hdr = self.header_class() - assert_equal(self.supported_np_types, + assert (self.supported_np_types == supported_np_types(hdr)) def get_bad_bb(self): @@ -84,7 +80,7 @@ def test_general_init(self): hdr = self.header_class() # an empty header has shape (0,) - like an empty array # (np.array([])) - assert_equal(hdr.get_data_shape(), (0,)) + assert hdr.get_data_shape() == (0,) # The affine is always homogenous 3D regardless of shape. The # default affine will have -1 as the X zoom iff default_x_flip # is True (which it is by default).
We have to be careful of the @@ -93,20 +89,20 @@ def test_general_init(self): assert_array_equal(np.diag(hdr.get_base_affine()), [-1, 1, 1, 1]) # But zooms only go with number of dimensions - assert_equal(hdr.get_zooms(), (1.0,)) + assert hdr.get_zooms() == (1.0,) def test_header_size(self): - assert_equal(self.header_class.template_dtype.itemsize, self.sizeof_hdr) + assert self.header_class.template_dtype.itemsize == self.sizeof_hdr def test_empty(self): hdr = self.header_class() - assert_true(len(hdr.binaryblock) == self.sizeof_hdr) - assert_true(hdr['sizeof_hdr'] == self.sizeof_hdr) - assert_true(np.all(hdr['dim'][1:] == 1)) - assert_true(hdr['dim'][0] == 0) - assert_true(np.all(hdr['pixdim'] == 1)) - assert_true(hdr['datatype'] == 16) # float32 - assert_true(hdr['bitpix'] == 32) + assert len(hdr.binaryblock) == self.sizeof_hdr + assert hdr['sizeof_hdr'] == self.sizeof_hdr + assert np.all(hdr['dim'][1:] == 1) + assert hdr['dim'][0] == 0 + assert np.all(hdr['pixdim'] == 1) + assert hdr['datatype'] == 16 # float32 + assert hdr['bitpix'] == 32 def _set_something_into_hdr(self, hdr): # Called from test_bytes test method. Specific to the header data type @@ -117,26 +113,26 @@ def test_checks(self): # Test header checks hdr_t = self.header_class() # _dxer just returns the diagnostics as a string - assert_equal(self._dxer(hdr_t), '') + assert self._dxer(hdr_t) == '' hdr = hdr_t.copy() hdr['sizeof_hdr'] = 1 with suppress_warnings(): - assert_equal(self._dxer(hdr), 'sizeof_hdr should be ' + + assert (self._dxer(hdr) == 'sizeof_hdr should be ' + str(self.sizeof_hdr)) hdr = hdr_t.copy() hdr['datatype'] = 0 - assert_equal(self._dxer(hdr), 'data code 0 not supported\n' + assert (self._dxer(hdr) == 'data code 0 not supported\n' 'bitpix does not match datatype') hdr = hdr_t.copy() hdr['bitpix'] = 0 - assert_equal(self._dxer(hdr), 'bitpix does not match datatype') + assert self._dxer(hdr) == 'bitpix does not match datatype' def test_pixdim_checks(self): hdr_t = self.header_class() for i in (1, 2, 3): hdr = hdr_t.copy() hdr['pixdim'][i] = -1 - assert_equal(self._dxer(hdr), 'pixdim[1,2,3] should be positive') + assert self._dxer(hdr) == 'pixdim[1,2,3] should be positive' def test_log_checks(self): # Test logging, fixing, errors for header checking @@ -146,11 +142,13 @@ def test_log_checks(self): with suppress_warnings(): hdr['sizeof_hdr'] = 350 # severity 30 fhdr, message, raiser = self.log_chk(hdr, 30) - assert_equal(fhdr['sizeof_hdr'], self.sizeof_hdr) - assert_equal(message, + + assert fhdr['sizeof_hdr'] == self.sizeof_hdr + assert (message == 'sizeof_hdr should be {0}; set sizeof_hdr to {0}'.format( self.sizeof_hdr)) - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) # RGB datatype does not raise error hdr = HC() hdr.set_data_dtype('RGB') @@ -160,25 +158,28 @@ def test_log_checks(self): hdr['datatype'] = -1 # severity 40 with suppress_warnings(): fhdr, message, raiser = self.log_chk(hdr, 40) - assert_equal(message, 'data code -1 not recognized; ' + assert (message == 'data code -1 not recognized; ' 'not attempting fix') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) # datatype not supported hdr['datatype'] = 255 # severity 40 fhdr, message, raiser = self.log_chk(hdr, 40) - assert_equal(message, 'data code 255 not supported; ' + assert (message == 'data code 255 not supported; ' 'not attempting fix') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) # bitpix hdr = HC() hdr['datatype'] = 16 # float32 
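A note on the recurring pattern in this conversion: the log_chk helper returns raiser as a tuple (expected_exception, callable, *args), which nose's assert_raises(*raiser) consumed directly; pytest's context-manager form has to unpack it by hand, as the converted tests above do. A self-contained illustration, with log_chk_stub as a hypothetical stand-in for the real helper:

import pytest

def log_chk_stub():
    # hypothetical stand-in: returns (expected_exception, callable, *call_args)
    def check_fix(level):
        raise ValueError('error level %d exceeded' % level)
    return ValueError, check_fix, 40

raiser = log_chk_stub()
# nose: assert_raises(*raiser)
# pytest equivalent, as used throughout this patch:
with pytest.raises(raiser[0]):
    raiser[1](*raiser[2:])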
hdr['bitpix'] = 16 # severity 10 fhdr, message, raiser = self.log_chk(hdr, 10) - assert_equal(fhdr['bitpix'], 32) - assert_equal(message, 'bitpix does not match datatype; ' + assert fhdr['bitpix'] == 32 + assert (message == 'bitpix does not match datatype; ' 'setting bitpix to match datatype') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) def test_pixdim_log_checks(self): # pixdim positive @@ -186,28 +187,31 @@ def test_pixdim_log_checks(self): hdr = HC() hdr['pixdim'][1] = -2 # severity 35 fhdr, message, raiser = self.log_chk(hdr, 35) - assert_equal(fhdr['pixdim'][1], 2) - assert_equal(message, 'pixdim[1,2,3] should be positive; ' + assert fhdr['pixdim'][1] == 2 + assert (message == 'pixdim[1,2,3] should be positive; ' 'setting to abs of pixdim values') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) hdr = HC() hdr['pixdim'][1] = 0 # severity 30 fhdr, message, raiser = self.log_chk(hdr, 30) - assert_equal(fhdr['pixdim'][1], 1) - assert_equal(message, PIXDIM0_MSG) - assert_raises(*raiser) + assert fhdr['pixdim'][1] == 1 + assert message == PIXDIM0_MSG + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) # both hdr = HC() hdr['pixdim'][1] = 0 # severity 30 hdr['pixdim'][2] = -2 # severity 35 fhdr, message, raiser = self.log_chk(hdr, 35) - assert_equal(fhdr['pixdim'][1], 1) - assert_equal(fhdr['pixdim'][2], 2) - assert_equal(message, 'pixdim[1,2,3] should be ' + assert fhdr['pixdim'][1] == 1 + assert fhdr['pixdim'][2] == 2 + assert (message == 'pixdim[1,2,3] should be ' 'non-zero and pixdim[1,2,3] should ' 'be positive; setting 0 dims to 1 ' 'and setting to abs of pixdim values') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) def test_no_scaling_fixes(self): # Check we do not fix slope or intercept @@ -248,12 +252,13 @@ def test_logger_error(self): # Check log message appears in new logger imageglobals.logger = logger hdr.copy().check_fix() - assert_equal(str_io.getvalue(), + assert (str_io.getvalue() == 'bitpix does not match datatype; ' 'setting bitpix to match datatype\n') # Check that error_level in fact causes error to be raised imageglobals.error_level = 10 - assert_raises(HeaderDataError, hdr.copy().check_fix) + with pytest.raises(HeaderDataError): + hdr.copy().check_fix() finally: imageglobals.logger, imageglobals.error_level = log_cache @@ -304,55 +309,58 @@ def assert_set_dtype(dt_spec, np_dtype): assert_set_dtype(int, np_sys_int) hdr = self.header_class() for inp in all_unsupported_types: - assert_raises(HeaderDataError, hdr.set_data_dtype, inp) + with pytest.raises(HeaderDataError): + hdr.set_data_dtype(inp) def test_shapes(self): # Test that shape checks work hdr = self.header_class() for shape in ((2, 3, 4), (2, 3, 4, 5), (2, 3), (2,)): hdr.set_data_shape(shape) - assert_equal(hdr.get_data_shape(), shape) + assert hdr.get_data_shape() == shape # Check max works, but max+1 raises error dim_dtype = hdr.structarr['dim'].dtype # as_int for safety to deal with numpy 1.4.1 int conversion errors mx = as_int(np.iinfo(dim_dtype).max) shape = (mx,) hdr.set_data_shape(shape) - assert_equal(hdr.get_data_shape(), shape) + assert hdr.get_data_shape() == shape shape = (mx + 1,) - assert_raises(HeaderDataError, hdr.set_data_shape, shape) + with pytest.raises(HeaderDataError): + hdr.set_data_shape(shape) # Lists or tuples or arrays will work for setting shape shape = (2, 3, 4) for constructor in (list, tuple, np.array): hdr.set_data_shape(constructor(shape)) - 
assert_equal(hdr.get_data_shape(), shape) + assert hdr.get_data_shape() == shape def test_read_write_data(self): # Check reading and writing of data hdr = self.header_class() # Trying to read data from an empty header gives no data bytes = hdr.data_from_fileobj(BytesIO()) - assert_equal(len(bytes), 0) + assert len(bytes) == 0 # Setting no data into an empty header results in - no data str_io = BytesIO() hdr.data_to_fileobj([], str_io) - assert_equal(str_io.getvalue(), b'') + assert str_io.getvalue() == b'' # Setting more data then there should be gives an error - assert_raises(HeaderDataError, - hdr.data_to_fileobj, - np.zeros(3), - str_io) + with pytest.raises(HeaderDataError): + hdr.data_to_fileobj(np.zeros(3), str_io) # Test valid write hdr.set_data_shape((1, 2, 3)) hdr.set_data_dtype(np.float32) S = BytesIO() data = np.arange(6, dtype=np.float64) # data have to be the right shape - assert_raises(HeaderDataError, hdr.data_to_fileobj, data, S) + with pytest.raises(HeaderDataError): + hdr.data_to_fileobj(data, S) data = data.reshape((1, 2, 3)) # and size - assert_raises(HeaderDataError, hdr.data_to_fileobj, data[:, :, :-1], S) - assert_raises(HeaderDataError, hdr.data_to_fileobj, data[:, :-1, :], S) + with pytest.raises(HeaderDataError): + hdr.data_to_fileobj(data[:, :, :-1], S) + with pytest.raises(HeaderDataError): + hdr.data_to_fileobj(data[:, :-1, :], S) # OK if so hdr.data_to_fileobj(data, S) # Read it back @@ -360,7 +368,7 @@ def test_read_write_data(self): # Should be about the same assert_array_almost_equal(data, data_back) # but with the header dtype, not the data dtype - assert_equal(hdr.get_data_dtype(), data_back.dtype) + assert hdr.get_data_dtype() == data_back.dtype # this is with native endian, not so for swapped S2 = BytesIO() hdr2 = hdr.as_byteswapped() @@ -371,9 +379,9 @@ def test_read_write_data(self): # Compares the same assert_array_almost_equal(data_back, data_back2) # Same dtype names - assert_equal(data_back.dtype.name, data_back2.dtype.name) + assert data_back.dtype.name == data_back2.dtype.name # But not the same endianness - assert_not_equal(data.dtype.byteorder, data_back2.dtype.byteorder) + assert data.dtype.byteorder != data_back2.dtype.byteorder # Try scaling down to integer hdr.set_data_dtype(np.uint8) S3 = BytesIO() @@ -385,15 +393,16 @@ def test_read_write_data(self): assert_array_almost_equal(data, data_back) # If the header can't do scaling, rescale raises an error if not hdr.has_data_slope: - assert_raises(HeaderTypeError, hdr.data_to_fileobj, data, S3) - assert_raises(HeaderTypeError, hdr.data_to_fileobj, data, S3, - rescale=True) + with pytest.raises(HeaderTypeError): + hdr.data_to_fileobj(data, S3) + with pytest.raises(HeaderTypeError): + hdr.data_to_fileobj(data, S3, rescale=True) # If not scaling we lose precision from rounding data = np.arange(6, dtype=np.float64).reshape((1, 2, 3)) + 0.5 with np.errstate(invalid='ignore'): hdr.data_to_fileobj(data, S3, rescale=False) data_back = hdr.data_from_fileobj(S3) - assert_false(np.allclose(data, data_back)) + assert not np.allclose(data, data_back) # Test RGB image dtype = np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')]) data = np.ones((1, 2, 3), dtype) @@ -409,26 +418,24 @@ def test_datatype(self): for code in codes.value_set(): npt = codes.type[code] if npt is np.void: - assert_raises( - HeaderDataError, - ehdr.set_data_dtype, - code) + with pytest.raises(HeaderDataError): + ehdr.set_data_dtype(code) continue dt = codes.dtype[code] ehdr.set_data_dtype(npt) - assert_true(ehdr['datatype'] == 
code) - assert_true(ehdr['bitpix'] == dt.itemsize * 8) + assert ehdr['datatype'] == code + assert ehdr['bitpix'] == dt.itemsize * 8 ehdr.set_data_dtype(code) - assert_true(ehdr['datatype'] == code) + assert ehdr['datatype'] == code ehdr.set_data_dtype(dt) - assert_true(ehdr['datatype'] == code) + assert ehdr['datatype'] == code def test_offset(self): # Test get / set offset hdr = self.header_class() offset = hdr.get_data_offset() hdr.set_data_offset(offset + 16) - assert_equal(hdr.get_data_offset(), offset + 16) + assert hdr.get_data_offset() == offset + 16 def test_data_shape_zooms_affine(self): hdr = self.header_class() @@ -436,27 +443,23 @@ def test_data_shape_zooms_affine(self): L = len(shape) hdr.set_data_shape(shape) if L: - assert_equal(hdr.get_data_shape(), shape) + assert hdr.get_data_shape() == shape else: - assert_equal(hdr.get_data_shape(), (0,)) + assert hdr.get_data_shape() == (0,) # Default zoom - for 3D - is 1(()) - assert_equal(hdr.get_zooms(), (1,) * L) + assert hdr.get_zooms() == (1,) * L # errors if zooms do not match shape if len(shape): - assert_raises(HeaderDataError, - hdr.set_zooms, - (1,) * (L - 1)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((1,) * (L - 1)) # Errors for negative zooms - assert_raises(HeaderDataError, - hdr.set_zooms, - (-1,) + (1,) * (L - 1)) - assert_raises(HeaderDataError, - hdr.set_zooms, - (1,) * (L + 1)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((-1,) + (1,) * (L - 1)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((1,) * (L + 1)) # Errors for negative zooms - assert_raises(HeaderDataError, - hdr.set_zooms, - (-1,) * L) + with pytest.raises(HeaderDataError): + hdr.set_zooms((-1,) * L) # reducing the dimensionality of the array and then increasing # it again reverts the previously set zoom values to 1.0 hdr = self.header_class() @@ -489,20 +492,20 @@ def test_default_x_flip(self): def test_from_eg_file(self): fileobj = open(self.example_file, 'rb') hdr = self.header_class.from_fileobj(fileobj, check=False) - assert_equal(hdr.endianness, '>') - assert_equal(hdr['sizeof_hdr'], self.sizeof_hdr) + assert hdr.endianness == '>' + assert hdr['sizeof_hdr'] == self.sizeof_hdr def test_orientation(self): # Test flips hdr = self.header_class() - assert_true(hdr.default_x_flip) + assert hdr.default_x_flip hdr.set_data_shape((3, 5, 7)) hdr.set_zooms((4, 5, 6)) aff = np.diag((-4, 5, 6, 1)) aff[:3, 3] = np.array([1, 2, 3]) * np.array([-4, 5, 6]) * -1 assert_array_equal(hdr.get_base_affine(), aff) hdr.default_x_flip = False - assert_false(hdr.default_x_flip) + assert not hdr.default_x_flip aff[0] *= -1 assert_array_equal(hdr.get_base_affine(), aff) @@ -512,23 +515,23 @@ def test_str(self): s1 = str(hdr) # check the datacode recoding rexp = re.compile('^datatype +: float32', re.MULTILINE) - assert_true(rexp.search(s1) is not None) + assert rexp.search(s1) is not None def test_from_header(self): # check from header class method. 
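test_datatype above drives the header's data-type code table in all three directions: numpy type -> code, code -> code, and dtype -> code. A toy two-way table showing those round trips; the dict below is illustrative only, not nibabel's actual data_type_codes object:

import numpy as np

# illustrative subset of an Analyze-style datatype code table
DTYPE_BY_CODE = {2: np.dtype(np.uint8), 4: np.dtype(np.int16),
                 16: np.dtype(np.float32)}
CODE_BY_DTYPE = {dt: code for code, dt in DTYPE_BY_CODE.items()}

def datatype_and_bitpix(spec):
    # accept a code, a numpy scalar type, or a dtype, like set_data_dtype
    dt = DTYPE_BY_CODE[spec] if spec in DTYPE_BY_CODE else np.dtype(spec)
    return CODE_BY_DTYPE[dt], dt.itemsize * 8

assert datatype_and_bitpix(np.float32) == (16, 32)     # type -> code
assert datatype_and_bitpix(16) == (16, 32)             # code -> code
assert datatype_and_bitpix(np.dtype('i2')) == (4, 16)  # dtype -> code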
klass = self.header_class empty = klass.from_header() - assert_equal(klass(), empty) + assert klass() == empty empty = klass.from_header(None) - assert_equal(klass(), empty) + assert klass() == empty hdr = klass() hdr.set_data_dtype(np.float64) hdr.set_data_shape((1, 2, 3)) hdr.set_zooms((3.0, 2.0, 1.0)) for check in (True, False): copy = klass.from_header(hdr, check=check) - assert_equal(hdr, copy) - assert_false(hdr is copy) + assert hdr == copy + assert not hdr is copy class C(object): @@ -538,17 +541,17 @@ def get_data_shape(self): return (5, 4, 3) def get_zooms(self): return (10.0, 9.0, 8.0) converted = klass.from_header(C()) - assert_true(isinstance(converted, klass)) - assert_equal(converted.get_data_dtype(), np.dtype('i2')) - assert_equal(converted.get_data_shape(), (5, 4, 3)) - assert_equal(converted.get_zooms(), (10.0, 9.0, 8.0)) + assert isinstance(converted, klass) + assert converted.get_data_dtype() == np.dtype('i2') + assert converted.get_data_shape() == (5, 4, 3) + assert converted.get_zooms() == (10.0, 9.0, 8.0) def test_base_affine(self): klass = self.header_class hdr = klass() hdr.set_data_shape((3, 5, 7)) hdr.set_zooms((3, 2, 1)) - assert_true(hdr.default_x_flip) + assert hdr.default_x_flip assert_array_almost_equal( hdr.get_base_affine(), [[-3., 0., 0., 3.], @@ -574,7 +577,7 @@ def test_scaling(self): # Test integer scaling from float # Analyze headers cannot do float-integer scaling hdr = self.header_class() - assert_true(hdr.default_x_flip) + assert hdr.default_x_flip shape = (1, 2, 3) hdr.set_data_shape(shape) hdr.set_data_dtype(np.float32) @@ -588,22 +591,23 @@ def test_scaling(self): hdr.set_data_dtype(np.int32) # Writing to int needs scaling, and raises an error if we can't scale if not hdr.has_data_slope: - assert_raises(HeaderTypeError, hdr.data_to_fileobj, data, BytesIO()) + with pytest.raises(HeaderTypeError): + hdr.data_to_fileobj(data, BytesIO()) # But if we aren't scaling, convert the floats to integers and write with np.errstate(invalid='ignore'): hdr.data_to_fileobj(data, S, rescale=False) rdata = hdr.data_from_fileobj(S) - assert_true(np.allclose(data, rdata)) + assert np.allclose(data, rdata) # This won't work for floats that aren't close to integers data_p5 = data + 0.5 with np.errstate(invalid='ignore'): hdr.data_to_fileobj(data_p5, S, rescale=False) rdata = hdr.data_from_fileobj(S) - assert_false(np.allclose(data_p5, rdata)) + assert not np.allclose(data_p5, rdata) def test_slope_inter(self): hdr = self.header_class() - assert_equal(hdr.get_slope_inter(), (None, None)) + assert hdr.get_slope_inter() == (None, None) for slinter in ((None,), (None, None), (np.nan, np.nan), @@ -614,9 +618,11 @@ def test_slope_inter(self): (None, 0), (1.0, 0)): hdr.set_slope_inter(*slinter) - assert_equal(hdr.get_slope_inter(), (None, None)) - assert_raises(HeaderTypeError, hdr.set_slope_inter, 1.1) - assert_raises(HeaderTypeError, hdr.set_slope_inter, 1.0, 0.1) + assert hdr.get_slope_inter() == (None, None) + with pytest.raises(HeaderTypeError): + hdr.set_slope_inter(1.1) + with pytest.raises(HeaderTypeError): + hdr.set_slope_inter(1.0, 0.1) def test_from_analyze_map(self): # Test that any header can pass values from a mapping @@ -625,19 +631,22 @@ def test_from_analyze_map(self): class H1(object): pass - assert_raises(AttributeError, klass.from_header, H1()) + with pytest.raises(AttributeError): + klass.from_header(H1()) class H2(object): def get_data_dtype(self): return np.dtype('u1') - assert_raises(AttributeError, klass.from_header, H2()) + with 
pytest.raises(AttributeError): + klass.from_header(H2()) class H3(H2): def get_data_shape(self): return (2, 3, 4) - assert_raises(AttributeError, klass.from_header, H3()) + with pytest.raises(AttributeError): + klass.from_header(H3()) class H4(H3): @@ -647,7 +656,7 @@ def get_zooms(self): exp_hdr.set_data_dtype(np.dtype('u1')) exp_hdr.set_data_shape((2, 3, 4)) exp_hdr.set_zooms((4, 5, 6)) - assert_equal(klass.from_header(H4()), exp_hdr) + assert klass.from_header(H4()) == exp_hdr # cal_max, cal_min get properly set from ``as_analyze_map`` class H5(H4): @@ -656,7 +665,7 @@ def as_analyze_map(self): return dict(cal_min=-100, cal_max=100) exp_hdr['cal_min'] = -100 exp_hdr['cal_max'] = 100 - assert_equal(klass.from_header(H5()), exp_hdr) + assert klass.from_header(H5()) == exp_hdr # set_* methods override fields fron header class H6(H5): @@ -664,7 +673,7 @@ class H6(H5): def as_analyze_map(self): return dict(datatype=4, bitpix=32, cal_min=-100, cal_max=100) - assert_equal(klass.from_header(H6()), exp_hdr) + assert klass.from_header(H6()) == exp_hdr # Any mapping will do, including a Nifti header class H7(H5): @@ -677,7 +686,7 @@ def as_analyze_map(self): return n_hdr # Values from methods still override values from header (shape, dtype, # zooms still at defaults from n_hdr header fields above) - assert_equal(klass.from_header(H7()), exp_hdr) + assert klass.from_header(H7()) == exp_hdr def test_best_affine(): @@ -691,7 +700,8 @@ def test_data_code_error(): # test analyze raising error for unsupported codes hdr = Nifti1Header() hdr['datatype'] = 256 - assert_raises(HeaderDataError, AnalyzeHeader.from_header, hdr) + with pytest.raises(HeaderDataError): + AnalyzeHeader.from_header(hdr) class TestAnalyzeImage(tsi.TestSpatialImage, tsi.MmapImageMixin): @@ -701,7 +711,7 @@ class TestAnalyzeImage(tsi.TestSpatialImage, tsi.MmapImageMixin): def test_supported_types(self): img = self.image_class(np.zeros((2, 3, 4)), np.eye(4)) - assert_equal(self.supported_np_types, + assert (self.supported_np_types == supported_np_types(img)) def test_default_header(self): @@ -713,7 +723,7 @@ def test_default_header(self): hdr.set_data_dtype(arr.dtype) hdr.set_data_offset(0) hdr.set_slope_inter(np.nan, np.nan) - assert_equal(img.header, hdr) + assert img.header == hdr def test_data_hdr_cache(self): # test the API for loaded images, such that the data returned @@ -732,21 +742,21 @@ def test_data_hdr_cache(self): img = IC(data, affine, hdr) img.to_file_map(fm) img2 = IC.from_file_map(fm) - assert_equal(img2.shape, shape) - assert_equal(img2.get_data_dtype().type, np.int16) + assert img2.shape == shape + assert img2.get_data_dtype().type == np.int16 hdr = img2.header hdr.set_data_shape((3, 2, 2)) - assert_equal(hdr.get_data_shape(), (3, 2, 2)) + assert hdr.get_data_shape() == (3, 2, 2) hdr.set_data_dtype(np.uint8) - assert_equal(hdr.get_data_dtype(), np.dtype(np.uint8)) + assert hdr.get_data_dtype() == np.dtype(np.uint8) assert_array_equal(img2.get_fdata(), data) assert_array_equal(np.asanyarray(img2.dataobj), data) # now check read_img_data function - here we do see the changed # header sc_data = read_img_data(img2) - assert_equal(sc_data.shape, (3, 2, 2)) + assert sc_data.shape == (3, 2, 2) us_data = read_img_data(img2, prefer='unscaled') - assert_equal(us_data.shape, (3, 2, 2)) + assert us_data.shape == (3, 2, 2) def test_affine_44(self): IC = self.image_class @@ -760,7 +770,8 @@ def test_affine_44(self): img = IC(data, affine.tolist()) assert_array_equal(affine, img.affine) # Not OK - affine wrong shape - 
assert_raises(ValueError, IC, data, np.diag([2, 3, 4])) + with pytest.raises(ValueError): + IC(data, np.diag([2, 3, 4])) def test_offset_to_zero(self): # Check offset is always set to zero when creating images @@ -768,24 +779,24 @@ def test_offset_to_zero(self): arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) aff = np.eye(4) img = img_klass(arr, aff) - assert_equal(img.header.get_data_offset(), 0) + assert img.header.get_data_offset() == 0 # Save to BytesIO object(s), make sure offset still zero bytes_map = bytesio_filemap(img_klass) img.to_file_map(bytes_map) - assert_equal(img.header.get_data_offset(), 0) + assert img.header.get_data_offset() == 0 # Set offset in in-memory image big_off = 1024 img.header.set_data_offset(big_off) - assert_equal(img.header.get_data_offset(), big_off) + assert img.header.get_data_offset() == big_off # Offset is in proxy but not in image after saving to fileobj img_rt = bytesio_round_trip(img) - assert_equal(img_rt.dataobj.offset, big_off) - assert_equal(img_rt.header.get_data_offset(), 0) + assert img_rt.dataobj.offset == big_off + assert img_rt.header.get_data_offset() == 0 # The original header still has the big_off value img.header.set_data_offset(big_off) # Making a new image with this header resets to zero img_again = img_klass(arr, aff, img.header) - assert_equal(img_again.header.get_data_offset(), 0) + assert img_again.header.get_data_offset() == 0 def test_big_offset_exts(self): # Check writing offset beyond data works for different file extensions @@ -845,7 +856,7 @@ def test_pickle(self): img_str = pickle.dumps(img) img2 = pickle.loads(img_str) assert_array_equal(img.get_fdata(), img2.get_fdata()) - assert_equal(img.header, img2.header) + assert img.header == img2.header # Save / reload using bytes IO objects for key, value in img.file_map.items(): value.fileobj = BytesIO() @@ -864,10 +875,11 @@ def test_no_finite_values(self): data[:, 2] = -np.inf img = self.image_class(data, None) img.set_data_dtype(np.int16) - assert_equal(img.get_data_dtype(), np.dtype(np.int16)) + assert img.get_data_dtype() == np.dtype(np.int16) fm = bytesio_filemap(img) if not img.header.has_data_slope: - assert_raises(WriterError, img.to_file_map, fm) + with pytest.raises(WriterError): + img.to_file_map(fm) return img.to_file_map(fm) img_back = self.image_class.from_file_map(fm) @@ -879,4 +891,5 @@ def test_unsupported(): data = np.arange(24, dtype=np.int32).reshape((2, 3, 4)) affine = np.eye(4) data = np.arange(24, dtype=np.uint32).reshape((2, 3, 4)) - assert_raises(HeaderDataError, AnalyzeImage, data, affine) + with pytest.raises(HeaderDataError): + AnalyzeImage(data, affine) diff --git a/nibabel/tests/test_api_validators.py b/nibabel/tests/test_api_validators.py index 41d3f41110..a4d23aaefd 100644 --- a/nibabel/tests/test_api_validators.py +++ b/nibabel/tests/test_api_validators.py @@ -1,7 +1,6 @@ """ Metaclass and class for validating instance APIs """ -from nose.tools import assert_equal class validator2test(type): @@ -79,8 +78,8 @@ def validate_something(self, obj, params): The metaclass sets up a ``test_something`` function that runs these checks on each ( """ - assert_equal(obj.var, params['var']) - assert_equal(obj.get_var(), params['var']) + assert obj.var == params['var'] + assert obj.get_var() == params['var'] class TestRunAllTests(ValidateAPI): @@ -102,4 +101,4 @@ def validate_second(self, obj, param): def teardown(): # Check that both validate_xxx tests got run - assert_equal(TestRunAllTests.run_tests, ['first', 'second']) + assert 
TestRunAllTests.run_tests == ['first', 'second'] diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index 527f9b8f91..b00af7d90f 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -26,9 +26,8 @@ from unittest import mock from numpy.testing import assert_array_equal, assert_array_almost_equal -from nose.tools import (assert_true, assert_false, assert_equal, - assert_not_equal, assert_raises) -from nibabel.testing import memmap_after_ufunc +import pytest +from ..testing_pytest import memmap_after_ufunc from .test_fileslice import slicer_samples from .test_openers import patch_indexed_gzip @@ -70,15 +69,16 @@ def test_init(): bio.write(arr.tostring(order='F')) hdr = FunkyHeader(shape) ap = ArrayProxy(bio, hdr) - assert_true(ap.file_like is bio) - assert_equal(ap.shape, shape) + assert ap.file_like is bio + assert ap.shape == shape # shape should be read only - assert_raises(AttributeError, setattr, ap, 'shape', shape) + with pytest.raises(AttributeError): + setattr(ap, 'shape', shape) # Get the data assert_array_equal(np.asarray(ap), arr) # Check we can modify the original header without changing the ap version hdr.shape[0] = 6 - assert_not_equal(ap.shape, shape) + assert ap.shape != shape # Data stays the same, also assert_array_equal(np.asarray(ap), arr) # C order also possible @@ -88,7 +88,8 @@ def test_init(): ap = CArrayProxy(bio, FunkyHeader((2, 3, 4))) assert_array_equal(np.asarray(ap), arr) # Illegal init - assert_raises(TypeError, ArrayProxy, bio, object()) + with pytest.raises(TypeError): + ArrayProxy(bio, object()) def test_tuplespec(): @@ -106,7 +107,7 @@ def test_tuplespec(): ap_tuple = ArrayProxy(bio, tuple_spec) # Header and tuple specs produce identical behavior for prop in ('shape', 'dtype', 'offset', 'slope', 'inter', 'is_proxy'): - assert_equal(getattr(ap_header, prop), getattr(ap_tuple, prop)) + assert getattr(ap_header, prop) == getattr(ap_tuple, prop) for method, args in (('get_unscaled', ()), ('__array__', ()), ('__getitem__', ((0, 2, 1), )) ): @@ -116,9 +117,12 @@ def test_tuplespec(): for n in range(2, 5): ArrayProxy(bio, tuple_spec[:n]) # Bad tuple lengths - assert_raises(TypeError, ArrayProxy, bio, ()) - assert_raises(TypeError, ArrayProxy, bio, tuple_spec[:1]) - assert_raises(TypeError, ArrayProxy, bio, tuple_spec + ('error',)) + with pytest.raises(TypeError): + ArrayProxy(bio, ()) + with pytest.raises(TypeError): + ArrayProxy(bio, tuple_spec[:1]) + with pytest.raises(TypeError): + ArrayProxy(bio, tuple_spec + ('error',)) def write_raw_data(arr, hdr, fileobj): @@ -136,8 +140,8 @@ def test_nifti1_init(): write_raw_data(arr, hdr, bio) hdr.set_slope_inter(2, 10) ap = ArrayProxy(bio, hdr) - assert_true(ap.file_like == bio) - assert_equal(ap.shape, shape) + assert ap.file_like == bio + assert ap.shape == shape # Get the data assert_array_equal(np.asarray(ap), arr * 2.0 + 10) with InTemporaryDirectory(): @@ -145,8 +149,8 @@ def test_nifti1_init(): write_raw_data(arr, hdr, f) f.close() ap = ArrayProxy('test.nii', hdr) - assert_true(ap.file_like == 'test.nii') - assert_equal(ap.shape, shape) + assert ap.file_like == 'test.nii' + assert ap.shape == shape assert_array_equal(np.asarray(ap), arr * 2.0 + 10) @@ -182,14 +186,14 @@ def test_is_proxy(): hdr = FunkyHeader((2, 3, 4)) bio = BytesIO() prox = ArrayProxy(bio, hdr) - assert_true(is_proxy(prox)) - assert_false(is_proxy(bio)) - assert_false(is_proxy(hdr)) - assert_false(is_proxy(np.zeros((2, 3, 4)))) + assert is_proxy(prox) + assert not is_proxy(bio) + 
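Earlier in this hunk, test_init asserts that ArrayProxy.shape is read-only by wrapping setattr in pytest.raises. The same idiom in miniature, against a plain property-backed class rather than the real ArrayProxy:

import pytest

class ProxyLike(object):
    def __init__(self, shape):
        self._shape = tuple(shape)

    @property
    def shape(self):  # getter only, so assignment raises AttributeError
        return self._shape

prox = ProxyLike((2, 3, 4))
assert prox.shape == (2, 3, 4)
with pytest.raises(AttributeError):
    setattr(prox, 'shape', (4, 3, 2))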
assert not is_proxy(hdr) + assert not is_proxy(np.zeros((2, 3, 4))) class NP(object): is_proxy = False - assert_false(is_proxy(NP())) + assert not is_proxy(NP()) def test_reshape_dataobj(): @@ -203,11 +207,11 @@ def test_reshape_dataobj(): assert_array_equal(prox, arr) assert_array_equal(reshape_dataobj(prox, (2, 3, 4)), np.reshape(arr, (2, 3, 4))) - assert_equal(prox.shape, shape) - assert_equal(arr.shape, shape) + assert prox.shape == shape + assert arr.shape == shape assert_array_equal(reshape_dataobj(arr, (2, 3, 4)), np.reshape(arr, (2, 3, 4))) - assert_equal(arr.shape, shape) + assert arr.shape == shape class ArrGiver(object): @@ -216,7 +220,7 @@ def __array__(self): assert_array_equal(reshape_dataobj(ArrGiver(), (2, 3, 4)), np.reshape(arr, (2, 3, 4))) - assert_equal(arr.shape, shape) + assert arr.shape == shape def test_reshaped_is_proxy(): @@ -224,13 +228,16 @@ def test_reshaped_is_proxy(): hdr = FunkyHeader(shape) bio = BytesIO() prox = ArrayProxy(bio, hdr) - assert_true(isinstance(prox.reshape((2, 3, 4)), ArrayProxy)) + assert isinstance(prox.reshape((2, 3, 4)), ArrayProxy) minus1 = prox.reshape((2, -1, 4)) - assert_true(isinstance(minus1, ArrayProxy)) - assert_equal(minus1.shape, (2, 3, 4)) - assert_raises(ValueError, prox.reshape, (-1, -1, 4)) - assert_raises(ValueError, prox.reshape, (2, 3, 5)) - assert_raises(ValueError, prox.reshape, (2, -1, 5)) + assert isinstance(minus1, ArrayProxy) + assert minus1.shape == (2, 3, 4) + with pytest.raises(ValueError): + prox.reshape((-1, -1, 4)) + with pytest.raises(ValueError): + prox.reshape((2, 3, 5)) + with pytest.raises(ValueError): + prox.reshape((2, -1, 5)) def test_get_unscaled(): @@ -313,21 +320,24 @@ def check_mmap(hdr, offset, proxy_class, unscaled_is_mmap = isinstance(unscaled, np.memmap) back_is_mmap = isinstance(back_data, np.memmap) if expected_mode is None: - assert_false(unscaled_is_mmap) - assert_false(back_is_mmap) + assert not unscaled_is_mmap + assert not back_is_mmap else: - assert_equal(unscaled_is_mmap, - viral_memmap or unscaled_really_mmap) - assert_equal(back_is_mmap, - viral_memmap or scaled_really_mmap) + assert (unscaled_is_mmap == + (viral_memmap or unscaled_really_mmap)) + assert (back_is_mmap == + (viral_memmap or scaled_really_mmap)) if scaled_really_mmap: - assert_equal(back_data.mode, expected_mode) + assert back_data.mode == expected_mode del prox, back_data # Check that mmap is keyword-only - assert_raises(TypeError, proxy_class, fname, hdr, True) + with pytest.raises(TypeError): + proxy_class(fname, hdr, True) # Check invalid values raise error - assert_raises(ValueError, proxy_class, fname, hdr, mmap='rw') - assert_raises(ValueError, proxy_class, fname, hdr, mmap='r+') + with pytest.raises(ValueError): + proxy_class(fname, hdr, mmap='rw') + with pytest.raises(ValueError): + proxy_class(fname, hdr, mmap='r+') # An image opener class which counts how many instances of itself have been @@ -462,11 +472,11 @@ def test_keep_file_open_true_false_invalid(): fobj.write(data.tostring(order='F')) for invalid_kfo in (55, 'auto', 'cauto'): - with assert_raises(ValueError): + with pytest.raises(ValueError): ArrayProxy(fname, ((10, 10, 10), dtype), keep_file_open=invalid_kfo) with patch_keep_file_open_default(invalid_kfo): - with assert_raises(ValueError): + with pytest.raises(ValueError): ArrayProxy(fname, ((10, 10, 10), dtype)) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index d5547e875f..fa24b37102 100644 --- a/nibabel/tests/test_arraywriters.py +++ 
b/nibabel/tests/test_arraywriters.py @@ -16,10 +16,8 @@ from numpy.testing import (assert_array_almost_equal, assert_array_equal) -from nose.tools import (assert_true, assert_false, - assert_equal, assert_not_equal, - assert_raises) -from ..testing import (assert_allclose_safely, suppress_warnings, +import pytest +from ..testing_pytest import (assert_allclose_safely, suppress_warnings, error_warnings) @@ -56,8 +54,8 @@ def test_arraywriters(): for type in test_types: arr = np.arange(10, dtype=type) aw = klass(arr) - assert_true(aw.array is arr) - assert_equal(aw.out_dtype, arr.dtype) + assert aw.array is arr + assert aw.out_dtype == arr.dtype assert_array_equal(arr, round_trip(aw)) # Byteswapped should be OK bs_arr = arr.byteswap().newbyteorder('S') @@ -80,7 +78,7 @@ def test_arraywriters(): # C order works as well arr_back = round_trip(a2w, 'C') assert_array_equal(arr2, arr_back) - assert_true(arr_back.flags.c_contiguous) + assert arr_back.flags.c_contiguous def test_arraywriter_check_scaling(): @@ -89,14 +87,17 @@ def test_arraywriter_check_scaling(): arr = np.array([0, 1, 128, 255], np.uint8) aw = ArrayWriter(arr) # Out of range, scaling needed, default is error - assert_raises(WriterError, ArrayWriter, arr, np.int8) + with pytest.raises(WriterError): + ArrayWriter(arr, np.int8) # Make default explicit - assert_raises(WriterError, ArrayWriter, arr, np.int8, check_scaling=True) + with pytest.raises(WriterError): + ArrayWriter(arr, np.int8, check_scaling=True) # Turn off scaling check aw = ArrayWriter(arr, np.int8, check_scaling=False) assert_array_equal(round_trip(aw), np.clip(arr, 0, 127)) # Has to be keyword - assert_raises(TypeError, ArrayWriter, arr, np.int8, False) + with pytest.raises(TypeError): + ArrayWriter(arr, np.int8, False) def test_no_scaling(): @@ -154,39 +155,42 @@ def test_scaling_needed(): dt_def = [('f', 'i4')] arr = np.ones(10, dt_def) for t in NUMERIC_TYPES: - assert_raises(WriterError, ArrayWriter, arr, t) + with pytest.raises(WriterError): + ArrayWriter(arr, t) narr = np.ones(10, t) - assert_raises(WriterError, ArrayWriter, narr, dt_def) - assert_false(ArrayWriter(arr).scaling_needed()) - assert_false(ArrayWriter(arr, dt_def).scaling_needed()) + with pytest.raises(WriterError): + ArrayWriter(narr, dt_def) + assert not ArrayWriter(arr).scaling_needed() + assert not ArrayWriter(arr, dt_def).scaling_needed() # Any numeric type that can cast, needs no scaling for in_t in NUMERIC_TYPES: for out_t in NUMERIC_TYPES: if np.can_cast(in_t, out_t): aw = ArrayWriter(np.ones(10, in_t), out_t) - assert_false(aw.scaling_needed()) + assert not aw.scaling_needed() for in_t in NUMERIC_TYPES: # Numeric types to complex never need scaling arr = np.ones(10, in_t) for out_t in COMPLEX_TYPES: - assert_false(ArrayWriter(arr, out_t).scaling_needed()) + assert not ArrayWriter(arr, out_t).scaling_needed() # Attempts to scale from complex to anything else fails for in_t in COMPLEX_TYPES: for out_t in FLOAT_TYPES + IUINT_TYPES: arr = np.ones(10, in_t) - assert_raises(WriterError, ArrayWriter, arr, out_t) + with pytest.raises(WriterError): + ArrayWriter(arr, out_t) # Scaling from anything but complex to floats is OK for in_t in FLOAT_TYPES + IUINT_TYPES: arr = np.ones(10, in_t) for out_t in FLOAT_TYPES: - assert_false(ArrayWriter(arr, out_t).scaling_needed()) + assert not ArrayWriter(arr, out_t).scaling_needed() # For any other output type, arrays with no data don't need scaling for in_t in FLOAT_TYPES + IUINT_TYPES: arr_0 = np.zeros(10, in_t) arr_e = [] for out_t in IUINT_TYPES: - 
assert_false(ArrayWriter(arr_0, out_t).scaling_needed()) - assert_false(ArrayWriter(arr_e, out_t).scaling_needed()) + assert not ArrayWriter(arr_0, out_t).scaling_needed() + assert not ArrayWriter(arr_e, out_t).scaling_needed() # Going to (u)ints, non-finite arrays don't need scaling for writers that # can do scaling because these use finite_range to threshold the input data, # but ArrayWriter does not do this. so scaling_needed is True @@ -197,17 +201,16 @@ def test_scaling_needed(): arr_mix = np.array([np.nan, np.inf, -np.inf], dtype=in_t) for out_t in IUINT_TYPES: for arr in (arr_nan, arr_inf, arr_minf, arr_mix): - assert_true( - ArrayWriter(arr, out_t, check_scaling=False).scaling_needed()) - assert_false(SlopeArrayWriter(arr, out_t).scaling_needed()) - assert_false(SlopeInterArrayWriter(arr, out_t).scaling_needed()) + assert ArrayWriter(arr, out_t, check_scaling=False).scaling_needed() + assert not SlopeArrayWriter(arr, out_t).scaling_needed() + assert not SlopeInterArrayWriter(arr, out_t).scaling_needed() # Floats as input always need scaling for in_t in FLOAT_TYPES: arr = np.ones(10, in_t) for out_t in IUINT_TYPES: # We need an arraywriter that will tolerate construction when # scaling is needed - assert_true(SlopeArrayWriter(arr, out_t).scaling_needed()) + assert SlopeArrayWriter(arr, out_t).scaling_needed() # in-range (u)ints don't need scaling for in_t in IUINT_TYPES: in_info = np.iinfo(in_t) @@ -217,18 +220,18 @@ def test_scaling_needed(): out_min, out_max = out_info.min, out_info.max if in_min >= out_min and in_max <= out_max: arr = np.array([in_min, in_max], in_t) - assert_true(np.can_cast(arr.dtype, out_t)) + assert np.can_cast(arr.dtype, out_t) # We've already tested this with can_cast above, but... - assert_false(ArrayWriter(arr, out_t).scaling_needed()) + assert not ArrayWriter(arr, out_t).scaling_needed() continue # The output data type does not include the input data range max_min = max(in_min, out_min) # 0 for input or output uint min_max = min(in_max, out_max) arr = np.array([max_min, min_max], in_t) - assert_false(ArrayWriter(arr, out_t).scaling_needed()) - assert_true(SlopeInterArrayWriter(arr + 1, out_t).scaling_needed()) + assert not ArrayWriter(arr, out_t).scaling_needed() + assert SlopeInterArrayWriter(arr + 1, out_t).scaling_needed() if in_t in INT_TYPES: - assert_true(SlopeInterArrayWriter(arr - 1, out_t).scaling_needed()) + assert SlopeInterArrayWriter(arr - 1, out_t).scaling_needed() def test_special_rt(): @@ -239,14 +242,15 @@ def test_special_rt(): for in_dtt in FLOAT_TYPES: for out_dtt in IUINT_TYPES: in_arr = arr.astype(in_dtt) - assert_raises(WriterError, ArrayWriter, in_arr, out_dtt) + with pytest.raises(WriterError): + ArrayWriter(in_arr, out_dtt) aw = ArrayWriter(in_arr, out_dtt, check_scaling=False) mn, mx = shared_range(float, out_dtt) - assert_true(np.allclose(round_trip(aw).astype(float), - [mx, 0, mn])) + assert np.allclose(round_trip(aw).astype(float), + [mx, 0, mn]) for klass in (SlopeArrayWriter, SlopeInterArrayWriter): aw = klass(in_arr, out_dtt) - assert_equal(get_slope_inter(aw), (1, 0)) + assert get_slope_inter(aw) == (1, 0) assert_array_equal(round_trip(aw), 0) for in_dtt, out_dtt, awt in itertools.product( FLOAT_TYPES, @@ -254,7 +258,7 @@ def test_special_rt(): (ArrayWriter, SlopeArrayWriter, SlopeInterArrayWriter)): arr = np.zeros((3,), dtype=in_dtt) aw = awt(arr, out_dtt) - assert_equal(get_slope_inter(aw), (1, 0)) + assert get_slope_inter(aw) == (1, 0) assert_array_equal(round_trip(aw), 0) @@ -265,7 +269,7 @@ def test_high_int2uint(): 
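The scaling_needed and calculate_scale cases in this file all revolve around one computation: pick a slope (and, for SlopeInterArrayWriter, an intercept) so the input range lands inside the output integer range. A deliberately naive version for orientation only; nibabel's real writers additionally handle sign flips, non-finite values and floating-point precision:

import numpy as np

def naive_slope_inter(mn, mx, out_dtype):
    # map the data range [mn, mx] into the representable range of out_dtype
    info = np.iinfo(out_dtype)
    if info.min <= mn and mx <= info.max:
        return 1.0, 0.0                     # directly castable, no scaling
    if (mx - mn) <= (info.max - info.min):
        return 1.0, float(mn - info.min)    # an intercept alone suffices
    slope = (mx - mn) / float(info.max - info.min)
    return slope, float(mn - info.min * slope)

assert naive_slope_inter(0, 127, np.uint8) == (1.0, 0.0)
assert naive_slope_inter(-1, 254, np.uint8) == (1.0, -1.0)  # offset only
assert naive_slope_inter(-1, 509, np.uint8) == (2.0, -1.0)  # must scale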
arr = np.array([2**63], dtype=np.uint64) out_type = np.int64 aw = SlopeInterArrayWriter(arr, out_type) - assert_equal(aw.inter, 2**63) + assert aw.inter == 2**63 def test_slope_inter_castable(): @@ -282,7 +286,8 @@ def test_slope_inter_castable(): for in_dtt in FLOAT_TYPES: for out_dtt in IUINT_TYPES: in_arr = arr.astype(in_dtt) - assert_raises(WriterError, ArrayWriter, in_arr, out_dtt) + with pytest.raises(WriterError): + ArrayWriter(in_arr, out_dtt) aw = SlopeArrayWriter(arr.astype(in_dtt), out_dtt) # no error aw = SlopeInterArrayWriter(arr.astype(in_dtt), out_dtt) # no error for in_dtt, out_dtt, arr, slope_only, slope_inter, neither in ( @@ -313,17 +318,20 @@ def test_slope_inter_castable(): if slope_only: SlopeArrayWriter(data, out_dtt) else: - assert_raises(WriterError, SlopeArrayWriter, data, out_dtt) + with pytest.raises(WriterError): + SlopeArrayWriter(data, out_dtt) # With scaling and intercept if slope_inter: SlopeInterArrayWriter(data, out_dtt) else: - assert_raises(WriterError, SlopeInterArrayWriter, data, out_dtt) + with pytest.raises(WriterError): + SlopeInterArrayWriter(data, out_dtt) # With neither if neither: ArrayWriter(data, out_dtt) else: - assert_raises(WriterError, ArrayWriter, data, out_dtt) + with pytest.raises(WriterError): + ArrayWriter(data, out_dtt) def test_calculate_scale(): @@ -333,27 +341,28 @@ def test_calculate_scale(): SAW = SlopeArrayWriter # Offset handles scaling when it can aw = SIAW(npa([-2, -1], dtype=np.int8), np.uint8) - assert_equal(get_slope_inter(aw), (1.0, -2.0)) + assert get_slope_inter(aw) == (1.0, -2.0) # Sign flip handles these cases aw = SAW(npa([-2, -1], dtype=np.int8), np.uint8) - assert_equal(get_slope_inter(aw), (-1.0, 0.0)) + assert get_slope_inter(aw) == (-1.0, 0.0) aw = SAW(npa([-2, 0], dtype=np.int8), np.uint8) - assert_equal(get_slope_inter(aw), (-1.0, 0.0)) + assert get_slope_inter(aw) == (-1.0, 0.0) # But not when min magnitude is too large (scaling mechanism kicks in) aw = SAW(npa([-510, 0], dtype=np.int16), np.uint8) - assert_equal(get_slope_inter(aw), (-2.0, 0.0)) + assert get_slope_inter(aw) == (-2.0, 0.0) # Or for floats (attempts to expand across range) aw = SAW(npa([-2, 0], dtype=np.float32), np.uint8) - assert_not_equal(get_slope_inter(aw), (-1.0, 0.0)) + assert get_slope_inter(aw) != (-1.0, 0.0) # Case where offset handles scaling aw = SIAW(npa([-1, 1], dtype=np.int8), np.uint8) - assert_equal(get_slope_inter(aw), (1.0, -1.0)) + assert get_slope_inter(aw) == (1.0, -1.0) # Can't work for no offset case - assert_raises(WriterError, SAW, npa([-1, 1], dtype=np.int8), np.uint8) + with pytest.raises(WriterError): + SAW(npa([-1, 1], dtype=np.int8), np.uint8) # Offset trick can't work when max is out of range aw = SIAW(npa([-1, 255], dtype=np.int16), np.uint8) slope_inter = get_slope_inter(aw) - assert_not_equal(slope_inter, (1.0, -1.0)) + assert slope_inter != (1.0, -1.0) def test_resets(): @@ -391,11 +400,11 @@ def test_no_offset_scale(): (126, 127), (-127, 127)): aw = SAW(np.array(data, dtype=np.float32), np.int8) - assert_equal(aw.slope, 1.0) + assert aw.slope == 1.0 aw = SAW(np.array([-126, 127 * 2.0], dtype=np.float32), np.int8) - assert_equal(aw.slope, 2) + assert aw.slope == 2 aw = SAW(np.array([-128 * 2.0, 127], dtype=np.float32), np.int8) - assert_equal(aw.slope, 2) + assert aw.slope == 2 # Test that nasty abs behavior does not upset us n = -2**15 aw = SAW(np.array([n, n], dtype=np.int16), np.uint8) @@ -406,17 +415,17 @@ def test_with_offset_scale(): # Tests of specific cases in slope, inter SIAW = 
SlopeInterArrayWriter aw = SIAW(np.array([0, 127], dtype=np.int8), np.uint8) - assert_equal((aw.slope, aw.inter), (1, 0)) # in range + assert (aw.slope, aw.inter) == (1, 0) # in range aw = SIAW(np.array([-1, 126], dtype=np.int8), np.uint8) - assert_equal((aw.slope, aw.inter), (1, -1)) # offset only + assert (aw.slope, aw.inter) == (1, -1) # offset only aw = SIAW(np.array([-1, 254], dtype=np.int16), np.uint8) - assert_equal((aw.slope, aw.inter), (1, -1)) # offset only + assert (aw.slope, aw.inter) == (1, -1) # offset only aw = SIAW(np.array([-1, 255], dtype=np.int16), np.uint8) - assert_not_equal((aw.slope, aw.inter), (1, -1)) # Too big for offset only + assert (aw.slope, aw.inter) != (1, -1) # Too big for offset only aw = SIAW(np.array([-256, -2], dtype=np.int16), np.uint8) - assert_equal((aw.slope, aw.inter), (1, -256)) # offset only + assert (aw.slope, aw.inter) == (1, -256) # offset only aw = SIAW(np.array([-256, -2], dtype=np.int16), np.int8) - assert_equal((aw.slope, aw.inter), (1, -129)) # offset only + assert (aw.slope, aw.inter) == (1, -129) # offset only def test_io_scaling(): @@ -450,10 +459,10 @@ def test_io_scaling(): # Slope might be negative max_miss = np.abs(aw.slope) / 2. abs_err = np.abs(arr - arr3) - assert_true(np.all(abs_err <= max_miss)) + assert np.all(abs_err <= max_miss) if out_type in UINT_TYPES and 0 in (min(arr), max(arr)): # Check that error is minimized for 0 as min or max - assert_true(min(abs_err) == abs_err[arr == 0]) + assert min(abs_err) == abs_err[arr == 0] bio.truncate(0) bio.seek(0) @@ -476,10 +485,10 @@ def test_input_ranges(): max_miss = np.abs(aw.slope) / working_type(2.) + work_eps * 10 abs_err = np.abs(arr - arr3) max_err = np.abs(arr) * work_eps + max_miss - assert_true(np.all(abs_err <= max_err)) + assert np.all(abs_err <= max_err) if out_type in UINT_TYPES and 0 in (min(arr), max(arr)): # Check that error is minimized for 0 as min or max - assert_true(min(abs_err) == abs_err[arr == 0]) + assert min(abs_err) == abs_err[arr == 0] bio.truncate(0) bio.seek(0) @@ -500,12 +509,13 @@ def test_nan2zero(): assert_array_equal(np.isnan(data_back), [True, False]) # Deprecation warning for nan2zero as argument to `to_fileobj` with error_warnings(): - assert_raises(DeprecationWarning, - aw.to_fileobj, BytesIO(), 'F', True) - assert_raises(DeprecationWarning, - aw.to_fileobj, BytesIO(), 'F', nan2zero=True) + with pytest.raises(DeprecationWarning): + aw.to_fileobj(BytesIO(), 'F', True) + with pytest.raises(DeprecationWarning): + aw.to_fileobj(BytesIO(), 'F', nan2zero=True) # Error if nan2zero is not the value set at initialization - assert_raises(WriterError, aw.to_fileobj, BytesIO(), 'F', False) + with pytest.raises(WriterError): + aw.to_fileobj(BytesIO(), 'F', False) # set explicitly aw = awt(arr, np.float32, nan2zero=True, **kwargs) data_back = round_trip(aw) @@ -521,12 +531,13 @@ def test_nan2zero(): assert_array_equal(data_back, [astype_res, 99]) # Deprecation warning for nan2zero as argument to `to_fileobj` with error_warnings(): - assert_raises(DeprecationWarning, - aw.to_fileobj, BytesIO(), 'F', False) - assert_raises(DeprecationWarning, - aw.to_fileobj, BytesIO(), 'F', nan2zero=False) + with pytest.raises(DeprecationWarning): + aw.to_fileobj(BytesIO(), 'F', False) + with pytest.raises(DeprecationWarning): + aw.to_fileobj(BytesIO(), 'F', nan2zero=False) # Error if nan2zero is not the value set at initialization - assert_raises(WriterError, aw.to_fileobj, BytesIO(), 'F', True) + with pytest.raises(WriterError): + aw.to_fileobj(BytesIO(), 'F', True) def 
test_byte_orders(): @@ -580,55 +591,62 @@ def test_to_float(): for klass in (SlopeInterArrayWriter, SlopeArrayWriter, ArrayWriter): if in_type in COMPLEX_TYPES and out_type in FLOAT_TYPES: - assert_raises(WriterError, klass, arr, out_type) + with pytest.raises(WriterError): + klass(arr, out_type) continue aw = klass(arr, out_type) - assert_true(aw.array is arr) - assert_equal(aw.out_dtype, out_type) + assert aw.array is arr + assert aw.out_dtype == out_type arr_back = round_trip(aw) assert_array_equal(arr.astype(out_type), arr_back) # Check too-big values overflowed correctly out_min, out_max = out_info['min'], out_info['max'] - assert_true(np.all(arr_back[arr > out_max] == np.inf)) - assert_true(np.all(arr_back[arr < out_min] == -np.inf)) + assert np.all(arr_back[arr > out_max] == np.inf) + assert np.all(arr_back[arr < out_min] == -np.inf) def test_dumber_writers(): arr = np.arange(10, dtype=np.float64) aw = SlopeArrayWriter(arr) aw.slope = 2.0 - assert_equal(aw.slope, 2.0) - assert_raises(AttributeError, getattr, aw, 'inter') + assert aw.slope == 2.0 + with pytest.raises(AttributeError): + getattr(aw, 'inter') aw = ArrayWriter(arr) - assert_raises(AttributeError, getattr, aw, 'slope') - assert_raises(AttributeError, getattr, aw, 'inter') + with pytest.raises(AttributeError): + getattr(aw, 'slope') + with pytest.raises(AttributeError): + getattr(aw, 'inter') # Attempt at scaling should raise error for dumb type - assert_raises(WriterError, ArrayWriter, arr, np.int16) + with pytest.raises(WriterError): + ArrayWriter(arr, np.int16) def test_writer_maker(): arr = np.arange(10, dtype=np.float64) aw = make_array_writer(arr, np.float64) - assert_true(isinstance(aw, SlopeInterArrayWriter)) + assert isinstance(aw, SlopeInterArrayWriter) aw = make_array_writer(arr, np.float64, True, True) - assert_true(isinstance(aw, SlopeInterArrayWriter)) + assert isinstance(aw, SlopeInterArrayWriter) aw = make_array_writer(arr, np.float64, True, False) - assert_true(isinstance(aw, SlopeArrayWriter)) + assert isinstance(aw, SlopeArrayWriter) aw = make_array_writer(arr, np.float64, False, False) - assert_true(isinstance(aw, ArrayWriter)) - assert_raises(ValueError, make_array_writer, arr, np.float64, False) - assert_raises(ValueError, make_array_writer, arr, np.float64, False, True) + assert isinstance(aw, ArrayWriter) + with pytest.raises(ValueError): + make_array_writer(arr, np.float64, False) + with pytest.raises(ValueError): + make_array_writer(arr, np.float64, False, True) # Does calc_scale get run by default? 
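# (With calc_scale=False, construction defers the scaling computation, so
# the writer starts at the identity (slope, inter) == (1, 0); a later
# calc_scale() call -- or the default calc_scale=True -- computes values
# from the data range, as the assertions below check.)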
aw = make_array_writer(arr, np.int16, calc_scale=False) - assert_equal((aw.slope, aw.inter), (1, 0)) + assert (aw.slope, aw.inter) == (1, 0) aw.calc_scale() slope, inter = aw.slope, aw.inter - assert_false((slope, inter) == (1, 0)) + assert not (slope, inter) == (1, 0) # Should run by default aw = make_array_writer(arr, np.int16) - assert_equal((aw.slope, aw.inter), (slope, inter)) + assert (aw.slope, aw.inter) == (slope, inter) aw = make_array_writer(arr, np.int16, calc_scale=True) - assert_equal((aw.slope, aw.inter), (slope, inter)) + assert (aw.slope, aw.inter) == (slope, inter) def test_float_int_min_max(): @@ -647,7 +665,7 @@ def test_float_int_min_max(): except ScalingError: continue arr_back_sc = round_trip(aw) - assert_true(np.allclose(arr, arr_back_sc)) + assert np.allclose(arr, arr_back_sc) def test_int_int_min_max(): @@ -666,7 +684,7 @@ def test_int_int_min_max(): # integer allclose adiff = int_abs(arr - arr_back_sc) rdiff = adiff / (arr + eps) - assert_true(np.all(rdiff < rtol)) + assert np.all(rdiff < rtol) def test_int_int_slope(): @@ -687,12 +705,12 @@ def test_int_int_slope(): aw = SlopeArrayWriter(arr, out_dt) except ScalingError: continue - assert_false(aw.slope == 0) + assert not aw.slope == 0 arr_back_sc = round_trip(aw) # integer allclose adiff = int_abs(arr - arr_back_sc) rdiff = adiff / (arr + eps) - assert_true(np.all(rdiff < rtol)) + assert np.all(rdiff < rtol) def test_float_int_spread(): @@ -712,7 +730,7 @@ def test_float_int_spread(): # Simulate allclose test with large atol diff = np.abs(arr_t - arr_back_sc) rdiff = diff / np.abs(arr_t) - assert_true(np.all((diff <= max_miss) | (rdiff <= 1e-5))) + assert np.all((diff <= max_miss) | (rdiff <= 1e-5)) def rt_err_estimate(arr_t, out_dtype, slope, inter): @@ -749,7 +767,7 @@ def test_rt_bias(): aw.inter) # Hokey use of max_miss as a std estimate bias_thresh = np.max([max_miss / np.sqrt(count), eps]) - assert_true(np.abs(bias) < bias_thresh) + assert np.abs(bias) < bias_thresh def test_nan2zero_scaling(): @@ -789,10 +807,10 @@ def test_nan2zero_scaling(): back_nan_0 = round_trip(nan_0_aw) * float(sign) zero_aw = awt(zero_arr, out_dt, nan2zero=True) back_zero = round_trip(zero_aw) * float(sign) - assert_true(np.allclose(back_nan[1:], back_zero[1:])) + assert np.allclose(back_nan[1:], back_zero[1:]) assert_array_equal(back_nan[1:], back_nan_0[2:]) - assert_true(np.abs(back_nan[0] - back_zero[0]) < 1e-2) - assert_equal(*back_nan_0[:2]) + assert np.abs(back_nan[0] - back_zero[0]) < 1e-2 + assert back_nan_0[0] == back_nan_0[1] def test_finite_range_nan(): @@ -834,11 +852,11 @@ def test_finite_range_nan(): continue # Should not matter about the order of finite range method call # and has_nan property - test this is true - assert_equal(aw.has_nan, has_nan) - assert_equal(aw.finite_range(), res) + assert aw.has_nan == has_nan + assert aw.finite_range() == res aw = awt(in_arr, out_type, **kwargs) - assert_equal(aw.finite_range(), res) - assert_equal(aw.has_nan, has_nan) + assert aw.finite_range() == res + assert aw.has_nan == has_nan # Check float types work as complex in_arr = np.array(in_arr) if in_arr.dtype.kind == 'f': @@ -848,10 +866,11 @@ def test_finite_range_nan(): except WriterError: continue aw = awt(c_arr, out_type, **kwargs) - assert_equal(aw.has_nan, has_nan) - assert_equal(aw.finite_range(), res) + assert aw.has_nan == has_nan + assert aw.finite_range() == res # Structured type cannot be nan and we can test this a = np.array([[1., 0, 1], [2, 3, 4]]).view([('f1', 'f')]) aw = awt(a, a.dtype, **kwargs) - 
assert_raises(TypeError, aw.finite_range) - assert_false(aw.has_nan) + with pytest.raises(TypeError): + aw.finite_range() + assert not aw.has_nan diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index 45e8c28a52..b5ccdd2907 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -34,12 +34,11 @@ from ..spatialimages import HeaderDataError from .. import imageglobals -from unittest import TestCase +from unittest import TestCase, SkipTest from numpy.testing import assert_array_equal +import pytest -from ..testing import (assert_equal, assert_true, assert_false, - assert_raises, assert_not_equal) INTEGER_TYPES = np.sctypes['int'] + np.sctypes['uint'] @@ -80,7 +79,7 @@ def log_chk(hdr, level): if level == 0: # Should never log or raise error logger.setLevel(0) hdrc.check_fix(logger=logger, error_level=0) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' logger.removeHandler(handler) return hdrc, '', () # Non zero defect level, test above and below threshold. @@ -90,12 +89,12 @@ def log_chk(hdr, level): # Logging level above threshold, no log. logger.setLevel(level + 1) hdrc.check_fix(logger=logger, error_level=e_lev) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' # Logging level below threshold, log appears, store logged message logger.setLevel(level - 1) hdrc = hdr.copy() hdrc.check_fix(logger=logger, error_level=e_lev) - assert_true(str_io.getvalue() != '') + assert str_io.getvalue() != '' message = str_io.getvalue().strip() logger.removeHandler(handler) # When error level == level, check_fix should raise an error @@ -119,16 +118,22 @@ def get_bad_bb(self): # means do not check return None + @classmethod + def setUpClass(cls): + if cls.header_class is None: + raise SkipTest("no testing methods from the abstract class") + + def test_general_init(self): hdr = self.header_class() # binaryblock has length given by header data dtype binblock = hdr.binaryblock - assert_equal(len(binblock), hdr.structarr.dtype.itemsize) + assert len(binblock) == hdr.structarr.dtype.itemsize # Endianness will be native by default for empty header - assert_equal(hdr.endianness, native_code) + assert hdr.endianness == native_code # But you can change this if you want hdr = self.header_class(endianness='swapped') - assert_equal(hdr.endianness, swapped_code) + assert hdr.endianness == swapped_code # You can also pass in a check flag, without data this has no # effect hdr = self.header_class(check=False) @@ -141,17 +146,17 @@ def test__eq__(self): # Test equal and not equal hdr1 = self.header_class() hdr2 = self.header_class() - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 self._set_something_into_hdr(hdr1) - assert_not_equal(hdr1, hdr2) + assert hdr1 != hdr2 self._set_something_into_hdr(hdr2) - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 # Check byteswapping maintains equality hdr3 = hdr2.as_byteswapped() - assert_equal(hdr2, hdr3) + assert hdr2 == hdr3 # Check comparing to funny thing says no - assert_not_equal(hdr1, None) - assert_not_equal(hdr1, 1) + assert hdr1 != None + assert hdr1 != 1 def test_to_from_fileobj(self): # Successful write using write_to @@ -160,28 +165,26 @@ def test_to_from_fileobj(self): hdr.write_to(str_io) str_io.seek(0) hdr2 = self.header_class.from_fileobj(str_io) - assert_equal(hdr2.endianness, native_code) - assert_equal(hdr2.binaryblock, hdr.binaryblock) + assert hdr2.endianness == native_code + assert hdr2.binaryblock == hdr.binaryblock def test_mappingness(self): hdr = 
self.header_class() - assert_raises(ValueError, - hdr.__setitem__, - 'nonexistent key', - 0.1) + with pytest.raises(ValueError): + hdr.__setitem__('nonexistent key', 0.1) hdr_dt = hdr.structarr.dtype keys = hdr.keys() - assert_equal(keys, list(hdr)) + assert keys == list(hdr) vals = hdr.values() - assert_equal(len(vals), len(keys)) - assert_equal(keys, list(hdr_dt.names)) + assert len(vals) == len(keys) + assert keys == list(hdr_dt.names) for key, val in hdr.items(): assert_array_equal(hdr[key], val) # verify that .get operates as destined - assert_equal(hdr.get('nonexistent key'), None) - assert_equal(hdr.get('nonexistent key', 'default'), 'default') - assert_equal(hdr.get(keys[0]), vals[0]) - assert_equal(hdr.get(keys[0], 'default'), vals[0]) + assert hdr.get('nonexistent key') == None + assert hdr.get('nonexistent key', 'default') == 'default' + assert hdr.get(keys[0]) == vals[0] + assert hdr.get(keys[0], 'default') == vals[0] # make sure .get returns values which evaluate to False. We have to # use a different falsy value depending on the data type of the first @@ -189,9 +192,9 @@ def test_mappingness(self): falsyval = 0 if np.issubdtype(hdr_dt[0], np.number) else b'' hdr[keys[0]] = falsyval - assert_equal(hdr[keys[0]], falsyval) - assert_equal(hdr.get(keys[0]), falsyval) - assert_equal(hdr.get(keys[0], -1), falsyval) + assert hdr[keys[0]] == falsyval + assert hdr.get(keys[0]) == falsyval + assert hdr.get(keys[0], -1) == falsyval def test_endianness_ro(self): @@ -203,16 +206,17 @@ def test_endianness_ro(self): data) - but this is done via via the as_byteswapped method ''' hdr = self.header_class() - assert_raises(AttributeError, hdr.__setattr__, 'endianness', '<') + with pytest.raises(AttributeError): + hdr.__setattr__('endianness', '<') def test_endian_guess(self): # Check guesses of endian eh = self.header_class() - assert_equal(eh.endianness, native_code) + assert eh.endianness == native_code hdr_data = eh.structarr.copy() hdr_data = hdr_data.byteswap(swapped_code) eh_swapped = self.header_class(hdr_data.tostring()) - assert_equal(eh_swapped.endianness, swapped_code) + assert eh_swapped.endianness == swapped_code def test_binblock_is_file(self): # Checks that the binary string respresentation is the whole of the @@ -224,7 +228,7 @@ def test_binblock_is_file(self): hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) - assert_equal(str_io.getvalue(), hdr.binaryblock) + assert str_io.getvalue() == hdr.binaryblock def test_structarr(self): # structarr attribute also read only @@ -232,7 +236,8 @@ def test_structarr(self): # Just check we can get structarr hdr.structarr # That it's read only - assert_raises(AttributeError, hdr.__setattr__, 'structarr', 0) + with pytest.raises(AttributeError): + hdr.__setattr__('structarr', 0) def log_chk(self, hdr, level): return log_chk(hdr, level) @@ -241,50 +246,49 @@ def assert_no_log_err(self, hdr): """ Assert that no logging or errors result from this `hdr` """ fhdr, message, raiser = self.log_chk(hdr, 0) - assert_equal((fhdr, message), (hdr, '')) + assert (fhdr, message) == (hdr, '') def test_bytes(self): # Test get of bytes hdr1 = self.header_class() bb = hdr1.binaryblock hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Do a set into the header, and try again. 
The specifics of 'setting # something' will depend on the nature of the bytes object self._set_something_into_hdr(hdr1) hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Short and long binaryblocks give errors # (here set through init) - assert_raises(WrapStructError, - self.header_class, - bb[:-1]) - assert_raises(WrapStructError, - self.header_class, - bb + b'\x00') + with pytest.raises(WrapStructError): + self.header_class(bb[:-1]) + with pytest.raises(WrapStructError): + self.header_class(bb + b'\x00') # Checking set to true by default, and prevents nonsense being # set into the header. bb_bad = self.get_bad_bb() if bb_bad is None: return with imageglobals.LoggingOutputSuppressor(): - assert_raises(HeaderDataError, self.header_class, bb_bad) + with pytest.raises(HeaderDataError): + self.header_class(bb_bad) # now slips past without check _ = self.header_class(bb_bad, check=False) def test_as_byteswapped(self): # Check byte swapping hdr = self.header_class() - assert_equal(hdr.endianness, native_code) + assert hdr.endianness == native_code # same code just returns a copy hdr2 = hdr.as_byteswapped(native_code) - assert_false(hdr is hdr2) + assert not hdr is hdr2 # Different code gives byteswapped copy hdr_bs = hdr.as_byteswapped(swapped_code) - assert_equal(hdr_bs.endianness, swapped_code) - assert_not_equal(hdr.binaryblock, hdr_bs.binaryblock) + assert hdr_bs.endianness == swapped_code + assert hdr.binaryblock != hdr_bs.binaryblock # Note that contents is not rechecked on swap / copy class DC(self.header_class): @@ -292,7 +296,8 @@ class DC(self.header_class): def check_fix(self, *args, **kwargs): raise Exception # Assumes check=True default - assert_raises(Exception, DC, hdr.binaryblock) + with pytest.raises(Exception): + DC(hdr.binaryblock) hdr = DC(hdr.binaryblock, check=False) hdr2 = hdr.as_byteswapped(native_code) hdr_bs = hdr.as_byteswapped(swapped_code) @@ -311,7 +316,8 @@ def test_str(self): hdr = self.header_class() # Check something returns from str s1 = str(hdr) - assert_true(len(s1) > 0) + assert len(s1) > 0 + class _TestLabeledWrapStruct(_TestWrapStructBase): @@ -324,27 +330,30 @@ class MyHdr(self.header_class): _field_recoders = {} hdr = MyHdr() # Key not existing raises error - assert_raises(ValueError, hdr.get_value_label, 'improbable') + with pytest.raises(ValueError): + hdr.get_value_label('improbable') # Even if there is a recoder - assert_true('improbable' not in hdr.keys()) + assert 'improbable' not in hdr.keys() rec = Recoder([[0, 'fullness of heart']], ('code', 'label')) hdr._field_recoders['improbable'] = rec - assert_raises(ValueError, hdr.get_value_label, 'improbable') + with pytest.raises(ValueError): + hdr.get_value_label('improbable') # If the key exists in the structure, and is intable, then we can recode for key, value in hdr.items(): # No recoder at first - assert_raises(ValueError, hdr.get_value_label, 0) + with pytest.raises(ValueError): + hdr.get_value_label(0) if not value.dtype.type in INTEGER_TYPES or not np.isscalar(value): continue code = int(value) rec = Recoder([[code, 'fullness of heart']], ('code', 'label')) hdr._field_recoders[key] = rec - assert_equal(hdr.get_value_label(key), 'fullness of heart') + assert hdr.get_value_label(key) == 'fullness of heart' # If key exists, but value is missing, we get 'unknown code' # Speculating that we can set code value 0 or 1 new_code = 1 if code == 0 else 0 hdr[key] = 
new_code - assert_equal(hdr.get_value_label(key), + assert (hdr.get_value_label(key) == ''.format(new_code)) @@ -418,48 +427,48 @@ def _set_something_into_hdr(self, hdr): def test_empty(self): # Test contents of default header hdr = self.header_class() - assert_equal(hdr['an_integer'], 1) - assert_equal(hdr['a_str'], b'a string') + assert hdr['an_integer'] == 1 + assert hdr['a_str'] == b'a string' def test_str(self): hdr = self.header_class() s1 = str(hdr) - assert_true(len(s1) > 0) - assert_true('an_integer' in s1) - assert_true('a_str' in s1) + assert len(s1) > 0 + assert 'an_integer' in s1 + assert 'a_str' in s1 def test_copy(self): hdr = self.header_class() hdr2 = hdr.copy() - assert_equal(hdr, hdr2) + assert hdr == hdr2 self._set_something_into_hdr(hdr) - assert_not_equal(hdr, hdr2) + assert hdr != hdr2 self._set_something_into_hdr(hdr2) - assert_equal(hdr, hdr2) + assert hdr == hdr2 def test_copy(self): hdr = self.header_class() hdr2 = hdr.copy() - assert_equal(hdr, hdr2) + assert hdr == hdr2 self._set_something_into_hdr(hdr) - assert_not_equal(hdr, hdr2) + assert hdr != hdr2 self._set_something_into_hdr(hdr2) - assert_equal(hdr, hdr2) + assert hdr == hdr2 def test_checks(self): # Test header checks hdr_t = self.header_class() # _dxer just returns the diagnostics as a string # Default hdr is OK - assert_equal(self._dxer(hdr_t), '') + assert self._dxer(hdr_t) == '' # An integer should be 1 hdr = hdr_t.copy() hdr['an_integer'] = 2 - assert_equal(self._dxer(hdr), 'an_integer should be 1') + assert self._dxer(hdr) == 'an_integer should be 1' # String should be lower case hdr = hdr_t.copy() hdr['a_str'] = 'My Name' - assert_equal(self._dxer(hdr), 'a_str should be lower case') + assert self._dxer(hdr) == 'a_str should be lower case' def test_log_checks(self): # Test logging, fixing, errors for header checking @@ -470,15 +479,15 @@ def test_log_checks(self): hdr['an_integer'] = 2 # severity 40 fhdr, message, raiser = self.log_chk(hdr, 40) return - assert_equal(fhdr['an_integer'], 1) - assert_equal(message, + assert fhdr['an_integer'] == 1 + assert (message == 'an_integer should be 1; set an_integer to 1') assert_raises(*raiser) # lower case string hdr = HC() hdr['a_str'] = 'Hello' # severity = 20 fhdr, message, raiser = self.log_chk(hdr, 20) - assert_equal(message, 'a_str should be lower case; ' + assert (message == 'a_str should be lower case; ' 'set a_str to lower case') assert_raises(*raiser) @@ -499,12 +508,13 @@ def test_logger_error(self): # Check log message appears in new logger imageglobals.logger = logger hdr.copy().check_fix() - assert_equal(str_io.getvalue(), + assert (str_io.getvalue() == 'a_str should be lower case; ' 'set a_str to lower case\n') # Check that error_level in fact causes error to be raised imageglobals.error_level = 20 - assert_raises(HeaderDataError, hdr.copy().check_fix) + with pytest.raises(HeaderDataError): + hdr.copy().check_fix() finally: imageglobals.logger, imageglobals.error_level = log_cache @@ -518,13 +528,13 @@ class MyHdr(self.header_class): _field_recoders = {} hdr = MyHdr() s1 = str(hdr) - assert_true(len(s1) > 0) - assert_true('an_integer : 1' in s1) - assert_true('fullness of heart' not in s1) + assert len(s1) > 0 + assert 'an_integer : 1' in s1 + assert 'fullness of heart' not in s1 rec = Recoder([[1, 'fullness of heart']], ('code', 'label')) hdr._field_recoders['an_integer'] = rec s2 = str(hdr) - assert_true('fullness of heart' in s2) + assert 'fullness of heart' in s2 hdr['an_integer'] = 10 s1 = str(hdr) - assert_true('' in s1) + assert '' in 
s1 From d65e325f876241e15f264916780bbbce3928870e Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 12 Nov 2019 09:10:14 -0500 Subject: [PATCH 446/689] converting test_b* and test_c* --- nibabel/tests/test_batteryrunners.py | 61 ++++++++------- nibabel/tests/test_brikhead.py | 30 +++---- nibabel/tests/test_casting.py | 112 ++++++++++++++------------- 3 files changed, 103 insertions(+), 100 deletions(-) diff --git a/nibabel/tests/test_batteryrunners.py b/nibabel/tests/test_batteryrunners.py index 1130c2f4cb..883054ff96 100644 --- a/nibabel/tests/test_batteryrunners.py +++ b/nibabel/tests/test_batteryrunners.py @@ -15,9 +15,7 @@ from ..batteryrunners import BatteryRunner, Report -from ..testing import (assert_true, assert_false, assert_equal, - assert_not_equal, assert_raises) - +import pytest # define some trivial functions as checks def chk1(obj, fix=False): @@ -80,64 +78,67 @@ def chk_error(obj, fix=False): def test_init_basic(): # With no args, raise - assert_raises(TypeError, BatteryRunner) + with pytest.raises(TypeError): + BatteryRunner() # Len returns number of checks battrun = BatteryRunner((chk1,)) - assert_equal(len(battrun), 1) + assert len(battrun) == 1 battrun = BatteryRunner((chk1, chk2)) - assert_equal(len(battrun), 2) + assert len(battrun) == 2 def test_init_report(): rep = Report() - assert_equal(rep, Report(Exception, 0, '', '')) + assert rep == Report(Exception, 0, '', '') def test_report_strings(): rep = Report() - assert_not_equal(rep.__str__(), '') - assert_equal(rep.message, '') + assert rep.__str__() != '' + assert rep.message == '' str_io = StringIO() rep.write_raise(str_io) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' rep = Report(ValueError, 20, 'msg', 'fix') rep.write_raise(str_io) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' rep.problem_level = 30 rep.write_raise(str_io) - assert_equal(str_io.getvalue(), 'Level 30: msg; fix\n') + assert str_io.getvalue() == 'Level 30: msg; fix\n' str_io.truncate(0) str_io.seek(0) # No fix string, no fix message rep.fix_msg = '' rep.write_raise(str_io) - assert_equal(str_io.getvalue(), 'Level 30: msg\n') + assert str_io.getvalue() == 'Level 30: msg\n' rep.fix_msg = 'fix' str_io.truncate(0) str_io.seek(0) # If we drop the level, nothing goes to the log rep.problem_level = 20 rep.write_raise(str_io) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' # Unless we set the default log level in the call rep.write_raise(str_io, log_level=20) - assert_equal(str_io.getvalue(), 'Level 20: msg; fix\n') + assert str_io.getvalue() == 'Level 20: msg; fix\n' str_io.truncate(0) str_io.seek(0) # If we set the error level down this low, we raise an error - assert_raises(ValueError, rep.write_raise, str_io, 20) + with pytest.raises(ValueError): + rep.write_raise(str_io, 20) # But the log level wasn't low enough to do a log entry - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' # Error still raised with lower log threshold, but now we do get a # log entry - assert_raises(ValueError, rep.write_raise, str_io, 20, 20) - assert_equal(str_io.getvalue(), 'Level 20: msg; fix\n') + with pytest.raises(ValueError): + rep.write_raise(str_io, 20, 20) + assert str_io.getvalue() == 'Level 20: msg; fix\n' # If there's no error, we can't raise str_io.truncate(0) str_io.seek(0) rep.error = None rep.write_raise(str_io, 20) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' def test_logging(): @@ -147,10 +148,10 @@ def test_logging(): 
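# The same capture pattern recurs throughout these logging tests: attach a
# StreamHandler writing to an in-memory buffer, then assert on the buffer
# contents. In miniature (hypothetical logger name 'example'):
buf = StringIO()
log = logging.getLogger('example')
log.addHandler(logging.StreamHandler(buf))
log.setLevel(30)                 # WARNING threshold
log.warning('msg')               # emitted as 'msg\n' by the default format
assert buf.getvalue() == 'msg\n'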
logger.setLevel(30) # defaultish level logger.addHandler(logging.StreamHandler(str_io)) rep.log_raise(logger) - assert_equal(str_io.getvalue(), '') + assert str_io.getvalue() == '' rep.problem_level = 30 rep.log_raise(logger) - assert_equal(str_io.getvalue(), 'msg; fix\n') + assert str_io.getvalue() == 'msg; fix\n' str_io.truncate(0) str_io.seek(0) @@ -158,26 +159,26 @@ def test_logging(): def test_checks(): battrun = BatteryRunner((chk1,)) reports = battrun.check_only({}) - assert_equal(reports[0], + assert (reports[0] == Report(KeyError, 20, 'no "testkey"', '')) obj, reports = battrun.check_fix({}) - assert_equal(reports[0], + assert (reports[0] == Report(KeyError, 20, 'no "testkey"', 'added "testkey"')) - assert_equal(obj, {'testkey': 1}) + assert obj == {'testkey': 1} battrun = BatteryRunner((chk1, chk2)) reports = battrun.check_only({}) - assert_equal(reports[0], + assert (reports[0] == Report(KeyError, 20, 'no "testkey"', '')) - assert_equal(reports[1], + assert (reports[1] == Report(KeyError, 20, 'no "testkey"', @@ -187,14 +188,14 @@ def test_checks(): # Note, because obj is mutable, first and second point to modified # (and final) dictionary output_obj = {'testkey': 0} - assert_equal(reports[0], + assert (reports[0] == Report(KeyError, 20, 'no "testkey"', 'added "testkey"')) - assert_equal(reports[1], + assert (reports[1] == Report(ValueError, 10, '"testkey" != 0', 'set "testkey" to 0')) - assert_equal(obj, output_obj) + assert obj == output_obj diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index d09023d248..078193ce48 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -14,9 +14,9 @@ from .. import load, Nifti1Image from .. import brikhead -from nose.tools import (assert_true, assert_equal, assert_raises) +import pytest from numpy.testing import assert_array_equal -from ..testing import data_path +from ..testing_pytest import data_path from .test_fileslice import slicer_samples from .test_helpers import assert_data_similar @@ -80,10 +80,10 @@ def test_makehead(self): for tp in self.test_files: head1 = self.module.AFNIHeader.from_fileobj(tp['head']) head2 = self.module.AFNIHeader.from_header(head1) - assert_equal(head1, head2) - with assert_raises(self.module.AFNIHeaderError): + assert head1 == head2 + with pytest.raises(self.module.AFNIHeaderError): self.module.AFNIHeader.from_header(header=None) - with assert_raises(self.module.AFNIHeaderError): + with pytest.raises(self.module.AFNIHeaderError): self.module.AFNIHeader.from_header(tp['fname']) @@ -94,22 +94,22 @@ class TestAFNIImage(object): def test_brikheadfile(self): for tp in self.test_files: brik = self.module.load(tp['fname']) - assert_equal(brik.get_data_dtype().type, tp['dtype']) - assert_equal(brik.shape, tp['shape']) - assert_equal(brik.header.get_zooms(), tp['zooms']) + assert brik.get_data_dtype().type == tp['dtype'] + assert brik.shape == tp['shape'] + assert brik.header.get_zooms() == tp['zooms'] assert_array_equal(brik.affine, tp['affine']) - assert_equal(brik.header.get_space(), tp['space']) + assert brik.header.get_space() == tp['space'] data = brik.get_fdata() - assert_equal(data.shape, tp['shape']) + assert data.shape == tp['shape'] assert_array_equal(brik.dataobj.scaling, tp['scaling']) - assert_equal(brik.header.get_volume_labels(), tp['labels']) + assert brik.header.get_volume_labels() == tp['labels'] def test_load(self): # Check highest level load of brikhead works for tp in self.test_files: img = self.module.load(tp['head']) data = 
img.get_fdata() - assert_equal(data.shape, tp['shape']) + assert data.shape == tp['shape'] # min, max, mean values assert_data_similar(data, tp) # check if file can be converted to nifti @@ -123,7 +123,7 @@ def test_array_proxy_slicing(self): img = self.module.load(tp['fname']) arr = img.get_fdata() prox = img.dataobj - assert_true(prox.is_proxy) + assert prox.is_proxy for sliceobj in slicer_samples(img.shape): assert_array_equal(arr[sliceobj], prox[sliceobj]) @@ -134,7 +134,7 @@ class TestBadFiles(object): def test_brikheadfile(self): for tp in self.test_files: - with assert_raises(tp['err']): + with pytest.raises(tp['err']): self.module.load(tp['head']) @@ -145,5 +145,5 @@ class TestBadVars(object): def test_unpack_var(self): for var in self.vars: - with assert_raises(self.module.AFNIHeaderError): + with pytest.raises(self.module.AFNIHeaderError): self.module._unpack_var(var) diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index c9d3645ad1..791cdacedb 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -8,11 +8,11 @@ from ..casting import (float_to_int, shared_range, CastingError, int_to_float, as_int, int_abs, floor_log2, able_int_type, best_float, ulp, longdouble_precision_improved) -from ..testing import suppress_warnings +from ..testing_pytest import suppress_warnings from numpy.testing import (assert_array_almost_equal, assert_array_equal) -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +import pytest def test_shared_range(): @@ -35,7 +35,7 @@ def test_shared_range(): # not have an exact representation. fimax = int_to_float(imax, ft) if np.isfinite(fimax): - assert_true(int(fimax) != imax) + assert int(fimax) != imax # Therefore the imax, cast back to float, and to integer, will # overflow. If it overflows to the imax, we need to allow for # that possibility in the testing of our overflowed values @@ -43,13 +43,13 @@ def test_shared_range(): if imax_roundtrip == imax: thresh_overflow = True if thresh_overflow: - assert_true(np.all( + assert np.all( (bit_bigger == casted_mx) | - (bit_bigger == imax))) + (bit_bigger == imax)) else: - assert_true(np.all((bit_bigger <= casted_mx))) + assert np.all((bit_bigger <= casted_mx)) if it in np.sctypes['uint']: - assert_equal(mn, 0) + assert mn == 0 continue # And something larger for the minimum with suppress_warnings(): # overflow @@ -63,7 +63,7 @@ def test_shared_range(): # not have an exact representation. fimin = int_to_float(imin, ft) if np.isfinite(fimin): - assert_true(int(fimin) != imin) + assert int(fimin) != imin # Therefore the imin, cast back to float, and to integer, will # overflow. 
If it overflows to the imin, we need to allow for # that possibility in the testing of our overflowed values @@ -71,11 +71,11 @@ def test_shared_range(): if imin_roundtrip == imin: thresh_overflow = True if thresh_overflow: - assert_true(np.all( + assert np.all( (bit_smaller == casted_mn) | - (bit_smaller == imin))) + (bit_smaller == imin)) else: - assert_true(np.all((bit_smaller >= casted_mn))) + assert np.all((bit_smaller >= casted_mn)) def test_shared_range_inputs(): @@ -114,7 +114,8 @@ def test_casting(): im_exp[1] = ii.max assert_array_equal(iarr, im_exp) # NaNs, with nan2zero False, gives error - assert_raises(CastingError, float_to_int, farr, it, False) + with pytest.raises(CastingError): + float_to_int(farr, it, False) # We can pass through NaNs if we really want exp_arr[arr.index(np.nan)] = ft(np.nan).astype(it) with np.errstate(invalid='ignore'): @@ -130,7 +131,8 @@ def test_casting(): with np.errstate(invalid='ignore'): assert_array_equal(float_to_int(np.nan, np.int16), [0]) # Test nans give error if not nan2zero - assert_raises(CastingError, float_to_int, np.nan, np.int16, False) + with pytest.raises(CastingError): + float_to_int(np.nan, np.int16, False) def test_int_abs(): @@ -139,25 +141,25 @@ def test_int_abs(): in_arr = np.array([info.min, info.max], dtype=itype) idtype = np.dtype(itype) udtype = np.dtype(idtype.str.replace('i', 'u')) - assert_equal(udtype.kind, 'u') - assert_equal(idtype.itemsize, udtype.itemsize) + assert udtype.kind == 'u' + assert idtype.itemsize == udtype.itemsize mn, mx = in_arr e_mn = as_int(mx) + 1 # as_int needed for numpy 1.4.1 casting - assert_equal(int_abs(mx), mx) - assert_equal(int_abs(mn), e_mn) + assert int_abs(mx) == mx + assert int_abs(mn) == e_mn assert_array_equal(int_abs(in_arr), [e_mn, mx]) def test_floor_log2(): - assert_equal(floor_log2(2**9 + 1), 9) - assert_equal(floor_log2(-2**9 + 1), 8) - assert_equal(floor_log2(2), 1) - assert_equal(floor_log2(1), 0) - assert_equal(floor_log2(0.5), -1) - assert_equal(floor_log2(0.75), -1) - assert_equal(floor_log2(0.25), -2) - assert_equal(floor_log2(0.24), -3) - assert_equal(floor_log2(0), None) + assert floor_log2(2**9 + 1) == 9 + assert floor_log2(-2**9 + 1) == 8 + assert floor_log2(2) == 1 + assert floor_log2(1) == 0 + assert floor_log2(0.5) == -1 + assert floor_log2(0.75) == -1 + assert floor_log2(0.25) == -2 + assert floor_log2(0.24) == -3 + assert floor_log2(0) == None def test_able_int_type(): @@ -176,7 +178,7 @@ def test_able_int_type(): ([-1, 2**64 - 1], None), ([0, 2**64 - 1], np.uint64), ([0, 2**64], None)): - assert_equal(able_int_type(vals), exp_out) + assert able_int_type(vals) == exp_out def test_able_casting(): @@ -193,11 +195,11 @@ def test_able_casting(): ApBt = (A + B).dtype.type able_type = able_int_type([in_mn, in_mx, out_mn, out_mx]) if able_type is None: - assert_equal(ApBt, np.float64) + assert ApBt == np.float64 continue # Use str for comparison to avoid int32/64 vs intp comparison # failures - assert_equal(np.dtype(ApBt).str, np.dtype(able_type).str) + assert np.dtype(ApBt).str == np.dtype(able_type).str def test_best_float(): @@ -212,51 +214,51 @@ def test_best_float(): best = best_float() end_of_ints = np.float64(2**53) # float64 has continuous integers up to 2**53 - assert_equal(end_of_ints, end_of_ints + 1) + assert end_of_ints == end_of_ints + 1 # longdouble may have more, but not on 32 bit windows, at least end_of_ints = np.longdouble(2**53) if (end_of_ints == (end_of_ints + 1) or # off continuous integers machine() == 'sparc64' or # crippling slow longdouble on 
sparc longdouble_precision_improved()): # Windows precisions can change - assert_equal(best, np.float64) + assert best == np.float64 else: - assert_equal(best, np.longdouble) + assert best == np.longdouble def test_longdouble_precision_improved(): # Just check that this can only be True on windows, msvc from numpy.distutils.ccompiler import get_default_compiler if not (os.name == 'nt' and get_default_compiler() == 'msvc'): - assert_false(longdouble_precision_improved()) + assert not longdouble_precision_improved() def test_ulp(): - assert_equal(ulp(), np.finfo(np.float64).eps) - assert_equal(ulp(1.0), np.finfo(np.float64).eps) - assert_equal(ulp(np.float32(1.0)), np.finfo(np.float32).eps) - assert_equal(ulp(np.float32(1.999)), np.finfo(np.float32).eps) + assert ulp() == np.finfo(np.float64).eps + assert ulp(1.0) == np.finfo(np.float64).eps + assert ulp(np.float32(1.0)) == np.finfo(np.float32).eps + assert ulp(np.float32(1.999)) == np.finfo(np.float32).eps # Integers always return 1 - assert_equal(ulp(1), 1) - assert_equal(ulp(2**63 - 1), 1) + assert ulp(1) == 1 + assert ulp(2**63 - 1) == 1 # negative / positive same - assert_equal(ulp(-1), 1) - assert_equal(ulp(7.999), ulp(4.0)) - assert_equal(ulp(-7.999), ulp(4.0)) - assert_equal(ulp(np.float64(2**54 - 2)), 2) - assert_equal(ulp(np.float64(2**54)), 4) - assert_equal(ulp(np.float64(2**54)), 4) + assert ulp(-1) == 1 + assert ulp(7.999) == ulp(4.0) + assert ulp(-7.999) == ulp(4.0) + assert ulp(np.float64(2**54 - 2)) == 2 + assert ulp(np.float64(2**54)) == 4 + assert ulp(np.float64(2**54)) == 4 # Infs, NaNs return nan - assert_true(np.isnan(ulp(np.inf))) - assert_true(np.isnan(ulp(-np.inf))) - assert_true(np.isnan(ulp(np.nan))) + assert np.isnan(ulp(np.inf)) + assert np.isnan(ulp(-np.inf)) + assert np.isnan(ulp(np.nan)) # 0 gives subnormal smallest subn64 = np.float64(2**(-1022 - 52)) subn32 = np.float32(2**(-126 - 23)) - assert_equal(ulp(0.0), subn64) - assert_equal(ulp(np.float64(0)), subn64) - assert_equal(ulp(np.float32(0)), subn32) + assert ulp(0.0) == subn64 + assert ulp(np.float64(0)) == subn64 + assert ulp(np.float32(0)) == subn32 # as do multiples of subnormal smallest - assert_equal(ulp(subn64 * np.float64(2**52)), subn64) - assert_equal(ulp(subn64 * np.float64(2**53)), subn64 * 2) - assert_equal(ulp(subn32 * np.float32(2**23)), subn32) - assert_equal(ulp(subn32 * np.float32(2**24)), subn32 * 2) + assert ulp(subn64 * np.float64(2**52)) == subn64 + assert ulp(subn64 * np.float64(2**53)) == subn64 * 2 + assert ulp(subn32 * np.float32(2**23)) == subn32 + assert ulp(subn32 * np.float32(2**24)) == subn32 * 2 From 5f07260e00204a80ae5d4784058880f6c6e537e1 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 12 Nov 2019 09:12:29 -0500 Subject: [PATCH 447/689] updating travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index def7ca2369..944ec7e014 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,7 +130,7 @@ script: cd for_testing cp ../.coveragerc . 
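# Split test run during the incremental nose -> pytest migration: modules
# already converted are listed explicitly for pytest below, while the rest
# of the suite still goes through nosetests; both commands must pass.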
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_a*.py + pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py pytest -v ../nibabel/tests/test_w*.py else false From 16187e2b12229b947a8ce1d59fac9d6aa3f67595 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 12 Nov 2019 15:09:53 -0500 Subject: [PATCH 448/689] adding converted test_d* --- .travis.yml | 2 +- nibabel/tests/test_data.py | 151 ++++++++++++------------------- nibabel/tests/test_deprecated.py | 25 +++-- nibabel/tests/test_deprecator.py | 113 ++++++++++++----------- nibabel/tests/test_dft.py | 71 +++++++-------- 5 files changed, 163 insertions(+), 199 deletions(-) diff --git a/.travis.yml b/.travis.yml index 944ec7e014..63176ce581 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,7 +130,7 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py + pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py pytest -v ../nibabel/tests/test_w*.py else false diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index 641d6e55cd..0fc0bdc40d 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -16,93 +16,80 @@ from .. import data as nibd -from nose import with_setup -from nose.tools import (assert_equal, assert_raises, raises, assert_false) +import pytest from .test_environment import (setup_environment, teardown_environment, DATA_KEY, USER_KEY) -DATA_FUNCS = {} - - -def setup_data_env(): +@pytest.fixture() +def with_environment(request): setup_environment() - global DATA_FUNCS + DATA_FUNCS = {} DATA_FUNCS['home_dir_func'] = nibd.get_nipy_user_dir DATA_FUNCS['sys_dir_func'] = nibd.get_nipy_system_dir DATA_FUNCS['path_func'] = nibd.get_data_path + def teardown_data_env(): + teardown_environment() + nibd.get_nipy_user_dir = DATA_FUNCS['home_dir_func'] + nibd.get_nipy_system_dir = DATA_FUNCS['sys_dir_func'] + nibd.get_data_path = DATA_FUNCS['path_func'] -def teardown_data_env(): - teardown_environment() - nibd.get_nipy_user_dir = DATA_FUNCS['home_dir_func'] - nibd.get_nipy_system_dir = DATA_FUNCS['sys_dir_func'] - nibd.get_data_path = DATA_FUNCS['path_func'] - - -# decorator to use setup, teardown environment -with_environment = with_setup(setup_data_env, teardown_data_env) + request.addfinalizer(teardown_data_env) def test_datasource(): # Tests for DataSource pth = pjoin('some', 'path') ds = Datasource(pth) - yield assert_equal, ds.get_filename('unlikeley'), pjoin(pth, 'unlikeley') - yield (assert_equal, ds.get_filename('un', 'like', 'ley'), - pjoin(pth, 'un', 'like', 'ley')) + assert ds.get_filename('unlikeley') == pjoin(pth, 'unlikeley') + assert ds.get_filename('un', 'like', 'ley') == pjoin(pth, 'un', 'like', 'ley') def test_versioned(): with TemporaryDirectory() as tmpdir: - yield (assert_raises, - DataError, - VersionedDatasource, - tmpdir) + with pytest.raises(DataError): + VersionedDatasource(tmpdir) tmpfile = pjoin(tmpdir, 'config.ini') # ini file, but wrong section with open(tmpfile, 'wt') as fobj: fobj.write('[SOMESECTION]\n') fobj.write('version = 0.1\n') - yield (assert_raises, - DataError, - VersionedDatasource, - tmpdir) + with pytest.raises(DataError): + VersionedDatasource(tmpdir) # ini file, but right section, wrong key with open(tmpfile, 'wt') as fobj: 
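# VersionedDatasource reads a standard ConfigParser ini file and requires a
# 'version' key under [DEFAULT]; this case writes the right section with a
# wrong key, so construction must still raise DataError: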
fobj.write('[DEFAULT]\n') fobj.write('somekey = 0.1\n') - yield (assert_raises, - DataError, - VersionedDatasource, - tmpdir) + with pytest.raises(DataError): + VersionedDatasource(tmpdir) # ini file, right section and key with open(tmpfile, 'wt') as fobj: fobj.write('[DEFAULT]\n') fobj.write('version = 0.1\n') vds = VersionedDatasource(tmpdir) - yield assert_equal, vds.version, '0.1' - yield assert_equal, vds.version_no, 0.1 - yield assert_equal, vds.major_version, 0 - yield assert_equal, vds.minor_version, 1 - yield assert_equal, vds.get_filename('config.ini'), tmpfile + assert vds.version == '0.1' + assert vds.version_no == 0.1 + assert vds.major_version == 0 + assert vds.minor_version == 1 + assert vds.get_filename('config.ini') == tmpfile # ini file, right section and key, funny value with open(tmpfile, 'wt') as fobj: fobj.write('[DEFAULT]\n') fobj.write('version = 0.1.2.dev\n') vds = VersionedDatasource(tmpdir) - yield assert_equal, vds.version, '0.1.2.dev' - yield assert_equal, vds.version_no, 0.1 - yield assert_equal, vds.major_version, 0 - yield assert_equal, vds.minor_version, 1 + assert vds.version == '0.1.2.dev' + assert vds.version_no == 0.1 + assert vds.major_version == 0 + assert vds.minor_version == 1 def test__cfg_value(): # no file, return '' - yield assert_equal, _cfg_value('/implausible_file'), '' + assert _cfg_value('/implausible_file') == '' # try files try: fd, tmpfile = tempfile.mkstemp() @@ -111,16 +98,16 @@ def test__cfg_value(): fobj.write('[strange section]\n') fobj.write('path = /some/path\n') fobj.flush() - yield assert_equal, _cfg_value(tmpfile), '' + assert _cfg_value(tmpfile) == '' # right section, wrong key fobj.write('[DATA]\n') fobj.write('funnykey = /some/path\n') fobj.flush() - yield assert_equal, _cfg_value(tmpfile), '' + assert _cfg_value(tmpfile) == '' # right section, right key fobj.write('path = /some/path\n') fobj.flush() - yield assert_equal, _cfg_value(tmpfile), '/some/path' + assert _cfg_value(tmpfile) == '/some/path' fobj.close() finally: try: @@ -129,8 +116,7 @@ def test__cfg_value(): pass -@with_environment -def test_data_path(): +def test_data_path(with_environment): # wipe out any sources of data paths if DATA_KEY in env: del env[DATA_KEY] @@ -147,15 +133,15 @@ def test_data_path(): def_dirs = [pjoin(sys.prefix, 'share', 'nipy')] if sys.prefix == '/usr': def_dirs.append(pjoin('/usr/local', 'share', 'nipy')) - assert_equal(old_pth, def_dirs + ['/user/path']) + assert old_pth == def_dirs + ['/user/path'] # then we'll try adding some of our own tst_pth = '/a/path' + os.path.pathsep + '/b/ path' tst_list = ['/a/path', '/b/ path'] # First, an environment variable os.environ[DATA_KEY] = tst_list[0] - assert_equal(get_data_path(), tst_list[:1] + old_pth) + assert get_data_path() == tst_list[:1] + old_pth os.environ[DATA_KEY] = tst_pth - assert_equal(get_data_path(), tst_list + old_pth) + assert get_data_path() == tst_list + old_pth del os.environ[DATA_KEY] # Next, make a fake user directory, and put a file in there with TemporaryDirectory() as tmpdir: @@ -164,9 +150,9 @@ def test_data_path(): fobj.write('[DATA]\n') fobj.write('path = %s' % tst_pth) nibd.get_nipy_user_dir = lambda: tmpdir - assert_equal(get_data_path(), tst_list + def_dirs + [tmpdir]) + assert get_data_path() == tst_list + def_dirs + [tmpdir] nibd.get_nipy_user_dir = lambda: fake_user_dir - assert_equal(get_data_path(), old_pth) + assert get_data_path() == old_pth # with some trepidation, the system config files with TemporaryDirectory() as tmpdir: nibd.get_nipy_system_dir = 
lambda: tmpdir @@ -178,7 +164,7 @@ def test_data_path(): with open(tmpfile, 'wt') as fobj: fobj.write('[DATA]\n') fobj.write('path = %s\n' % '/path/two') - assert_equal(get_data_path(), + assert (get_data_path() == tst_list + ['/path/two'] + old_pth) @@ -189,74 +175,58 @@ def test_find_data_dir(): # under_here == '/nipy/utils' # subhere = 'tests' # fails with non-existant path - yield (assert_raises, - DataError, - find_data_dir, - [here], - 'implausible', - 'directory') + with pytest.raises(DataError): + find_data_dir([here], 'implausible', 'directory') # fails with file, when directory expected - yield (assert_raises, - DataError, - find_data_dir, - [here], - fname) + with pytest.raises(DataError): + find_data_dir([here], fname) # passes with directory that exists dd = find_data_dir([under_here], subhere) - yield assert_equal, dd, here + assert dd == here # and when one path in path list does not work dud_dir = pjoin(under_here, 'implausible') dd = find_data_dir([dud_dir, under_here], subhere) - yield assert_equal, dd, here + assert dd == here -@with_environment -def test_make_datasource(): +def test_make_datasource(with_environment): pkg_def = dict( relpath='pkg') with TemporaryDirectory() as tmpdir: nibd.get_data_path = lambda: [tmpdir] - yield (assert_raises, - DataError, - make_datasource, - pkg_def) + with pytest.raises(DataError): + make_datasource(pkg_def) pkg_dir = pjoin(tmpdir, 'pkg') os.mkdir(pkg_dir) - yield (assert_raises, - DataError, - make_datasource, - pkg_def) + with pytest.raises(DataError): + make_datasource(pkg_def) tmpfile = pjoin(pkg_dir, 'config.ini') with open(tmpfile, 'wt') as fobj: fobj.write('[DEFAULT]\n') fobj.write('version = 0.1\n') ds = make_datasource(pkg_def, data_path=[tmpdir]) - yield assert_equal, ds.version, '0.1' + assert ds.version == '0.1' -@raises(DataError) def test_bomber(): - b = Bomber('bomber example', 'a message') - b.any_attribute # no error + with pytest.raises(DataError): + b = Bomber('bomber example', 'a message') + b.any_attribute # no error def test_bomber_inspect(): b = Bomber('bomber example', 'a message') - assert_false(hasattr(b, 'any_attribute')) + assert not hasattr(b, 'any_attribute') -@with_environment -def test_datasource_or_bomber(): +def test_datasource_or_bomber(with_environment): pkg_def = dict( relpath='pkg') with TemporaryDirectory() as tmpdir: nibd.get_data_path = lambda: [tmpdir] ds = datasource_or_bomber(pkg_def) - yield (assert_raises, - DataError, - getattr, - ds, - 'get_filename') + with pytest.raises(DataError): + getattr(ds, 'get_filename') pkg_dir = pjoin(tmpdir, 'pkg') os.mkdir(pkg_dir) tmpfile = pjoin(pkg_dir, 'config.ini') @@ -271,8 +241,5 @@ def test_datasource_or_bomber(): ds.get_filename('some_file.txt') pkg_def['min version'] = '0.3' ds = datasource_or_bomber(pkg_def) # not OK - yield (assert_raises, - DataError, - getattr, - ds, - 'get_filename') + with pytest.raises(DataError): + getattr(ds, 'get_filename') diff --git a/nibabel/tests/test_deprecated.py b/nibabel/tests/test_deprecated.py index 2964707717..c031a0c60d 100644 --- a/nibabel/tests/test_deprecated.py +++ b/nibabel/tests/test_deprecated.py @@ -7,7 +7,6 @@ from nibabel.deprecated import (ModuleProxy, FutureWarningMixin, deprecate_with_version) -from nose.tools import (assert_true, assert_equal) from nibabel.tests.test_deprecator import TestDeprecatorFunc as _TestDF @@ -25,9 +24,9 @@ def teardown(): def test_module_proxy(): # Test proxy for module mp = ModuleProxy('nibabel.deprecated') - assert_true(hasattr(mp, 'ModuleProxy')) - 
assert_true(mp.ModuleProxy is ModuleProxy)
-    assert_equal(repr(mp), '<module proxy for nibabel.deprecated>')
+    assert hasattr(mp, 'ModuleProxy')
+    assert mp.ModuleProxy is ModuleProxy
+    assert repr(mp) == '<module proxy for nibabel.deprecated>'
 
 
 def test_futurewarning_mixin():
@@ -47,19 +46,19 @@ class E(FutureWarningMixin, C):
         warn_message = "Oh no, not this one"
     with warnings.catch_warnings(record=True) as warns:
         c = C(42)
-        assert_equal(c.meth(), 42)
-        assert_equal(warns, [])
+        assert c.meth() == 42
+        assert warns == []
        d = D(42)
-        assert_equal(d.meth(), 42)
+        assert d.meth() == 42
        warn = warns.pop(0)
-        assert_equal(warn.category, FutureWarning)
-        assert_equal(str(warn.message),
+        assert warn.category == FutureWarning
+        assert (str(warn.message) ==
                      'This class will be removed in future versions')
        e = E(42)
-        assert_equal(e.meth(), 42)
+        assert e.meth() == 42
        warn = warns.pop(0)
-        assert_equal(warn.category, FutureWarning)
-        assert_equal(str(warn.message), 'Oh no, not this one')
+        assert warn.category == FutureWarning
+        assert str(warn.message) == 'Oh no, not this one'
 
 
 class TestNibabelDeprecator(_TestDF):
@@ -78,6 +77,6 @@ def func():
     try:
         pkg_info.cmp_pkg_version.__defaults__ = ('2.0dev',)
         # No error, even though version is dev version of current
-        assert_equal(func(), 99)
+        assert func() == 99
     finally:
         pkg_info.cmp_pkg_version.__defaults__ = ('2.0',)
diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py
index c4dc2437a4..14255da4b4 100644
--- a/nibabel/tests/test_deprecator.py
+++ b/nibabel/tests/test_deprecator.py
@@ -5,37 +5,37 @@
 import warnings
 from functools import partial
 
-from nose.tools import (assert_true, assert_raises, assert_equal)
+import pytest
 
 from nibabel.deprecator import (_ensure_cr, _add_dep_doc,
                                 ExpiredDeprecationError, Deprecator)
 
-from ..testing import clear_and_catch_warnings
+from ..testing_pytest import clear_and_catch_warnings
 
 _OWN_MODULE = sys.modules[__name__]
 
 
 def test__ensure_cr():
     # Make sure text ends with carriage return
-    assert_equal(_ensure_cr(' foo'), ' foo\n')
-    assert_equal(_ensure_cr(' foo\n'), ' foo\n')
-    assert_equal(_ensure_cr(' foo '), ' foo\n')
-    assert_equal(_ensure_cr('foo '), 'foo\n')
-    assert_equal(_ensure_cr('foo \n bar'), 'foo \n bar\n')
-    assert_equal(_ensure_cr('foo \n\n'), 'foo\n')
+    assert _ensure_cr(' foo') == ' foo\n'
+    assert _ensure_cr(' foo\n') == ' foo\n'
+    assert _ensure_cr(' foo ') == ' foo\n'
+    assert _ensure_cr('foo ') == 'foo\n'
+    assert _ensure_cr('foo \n bar') == 'foo \n bar\n'
+    assert _ensure_cr('foo \n\n') == 'foo\n'
 
 
 def test__add_dep_doc():
     # Test utility function to add deprecation message to docstring
-    assert_equal(_add_dep_doc('', 'foo'), 'foo\n')
-    assert_equal(_add_dep_doc('bar', 'foo'), 'bar\n\nfoo\n')
-    assert_equal(_add_dep_doc(' bar', 'foo'), ' bar\n\nfoo\n')
-    assert_equal(_add_dep_doc(' bar', 'foo\n'), ' bar\n\nfoo\n')
-    assert_equal(_add_dep_doc('bar\n\n', 'foo'), 'bar\n\nfoo\n')
-    assert_equal(_add_dep_doc('bar\n \n', 'foo'), 'bar\n\nfoo\n')
-    assert_equal(_add_dep_doc(' bar\n\nSome explanation', 'foo\nbaz'),
+    assert _add_dep_doc('', 'foo') == 'foo\n'
+    assert _add_dep_doc('bar', 'foo') == 'bar\n\nfoo\n'
+    assert _add_dep_doc(' bar', 'foo') == ' bar\n\nfoo\n'
+    assert _add_dep_doc(' bar', 'foo\n') == ' bar\n\nfoo\n'
+    assert _add_dep_doc('bar\n\n', 'foo') == 'bar\n\nfoo\n'
+    assert _add_dep_doc('bar\n \n', 'foo') == 'bar\n\nfoo\n'
+    assert (_add_dep_doc(' bar\n\nSome explanation', 'foo\nbaz') ==
                 ' bar\n\nfoo\nbaz\n\nSome explanation\n')
-    assert_equal(_add_dep_doc(' bar\n\n Some explanation', 'foo\nbaz'),
+    assert (_add_dep_doc(' bar\n\n Some explanation', 'foo\nbaz') ==
                 ' bar\n \n foo\n baz\n \n Some explanation\n')
@@ -71,75 +71,79 @@ def test_dep_func(self):
         func = dec('foo')(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-            assert_true(w[0].category is DeprecationWarning)
-        assert_equal(func.__doc__, 'foo\n')
+            assert func() == None
+            assert len(w) == 1
+            assert w[0].category is DeprecationWarning
+        assert func.__doc__ == 'foo\n'
         func = dec('foo')(func_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(1), None)
-            assert_equal(len(w), 1)
-        assert_equal(func.__doc__, 'A docstring\n\nfoo\n')
+            assert func(1) == None
+            assert len(w) == 1
+        assert func.__doc__ == 'A docstring\n\nfoo\n'
         func = dec('foo')(func_doc_long)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(1, 2), None)
-            assert_equal(len(w), 1)
-        assert_equal(func.__doc__, 'A docstring\n \n foo\n \n Some text\n')
+            assert func(1, 2) == None
+            assert len(w) == 1
+        assert func.__doc__ == 'A docstring\n \n foo\n \n Some text\n'
 
         # Try some since and until versions
         func = dec('foo', '1.1')(func_no_doc)
-        assert_equal(func.__doc__, 'foo\n\n* deprecated from version: 1.1\n')
+        assert func.__doc__ == 'foo\n\n* deprecated from version: 1.1\n'
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-        func = dec('foo', until='2.4')(func_no_doc)
+            assert func() == None
+            assert len(w) == 1
+        func = dec('foo', until='99.4')(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-        assert_equal(func.__doc__,
-                     'foo\n\n* Will raise {} as of version: 2.4\n'
+            assert func() == None
+            assert len(w) == 1
+        assert (func.__doc__ ==
+                'foo\n\n* Will raise {} as of version: 99.4\n'
                     .format(ExpiredDeprecationError))
        func = dec('foo', until='1.8')(func_no_doc)
-        assert_raises(ExpiredDeprecationError, func)
-        assert_equal(func.__doc__,
+        with pytest.raises(ExpiredDeprecationError):
+            func()
+        assert (func.__doc__ ==
                'foo\n\n* Raises {} as of version: 1.8\n'
                     .format(ExpiredDeprecationError))
        func = dec('foo', '1.2', '1.8')(func_no_doc)
-        assert_raises(ExpiredDeprecationError, func)
-        assert_equal(func.__doc__,
+        with pytest.raises(ExpiredDeprecationError):
+            func()
+        assert (func.__doc__ ==
                'foo\n\n* deprecated from version: 1.2\n'
                '* Raises {} as of version: 1.8\n'
                     .format(ExpiredDeprecationError))
        func = dec('foo', '1.2', '1.8')(func_doc_long)
-        assert_equal(func.__doc__,
+        assert (func.__doc__ ==
                'A docstring\n \n foo\n \n'
                ' * deprecated from version: 1.2\n'
                ' * Raises {} as of version: 1.8\n \n'
                ' Some text\n'
                     .format(ExpiredDeprecationError))
-        assert_raises(ExpiredDeprecationError, func)
+        with pytest.raises(ExpiredDeprecationError):
+            func()
 
         # Check different warnings and errors
         func = dec('foo', warn_class=UserWarning)(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-            assert_true(w[0].category is UserWarning)
+            assert func() == None
+            assert len(w) == 1
+            assert w[0].category is UserWarning
 
         func = dec('foo', error_class=CustomError)(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-            assert_true(w[0].category is DeprecationWarning)
+            assert func() == None
+            assert len(w) == 1
+            assert w[0].category is DeprecationWarning
 
         func = dec('foo', until='1.8', error_class=CustomError)(func_no_doc)
-        assert_raises(CustomError, func)
+        with pytest.raises(CustomError):
+            func()
 
 
 class TestDeprecatorMaker(object):
@@ -152,17 +156,18 @@ def test_deprecator_maker(self):
         func = dec('foo')(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-            assert_true(w[0].category is UserWarning)
+            assert func() == None
+            assert len(w) == 1
+            assert w[0].category is UserWarning
 
         dec = self.dep_maker(error_class=CustomError)
         func = dec('foo')(func_no_doc)
         with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
             warnings.simplefilter('always')
-            assert_equal(func(), None)
-            assert_equal(len(w), 1)
-            assert_true(w[0].category is DeprecationWarning)
+            assert func() == None
+            assert len(w) == 1
+            assert w[0].category is DeprecationWarning
 
         func = dec('foo', until='1.8')(func_no_doc)
-        assert_raises(CustomError, func)
+        with pytest.raises(CustomError):
+            func()
diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py
index 0285b01575..d50ba4023e 100644
--- a/nibabel/tests/test_dft.py
+++ b/nibabel/tests/test_dft.py
@@ -4,35 +4,28 @@
 import os
 from os.path import join as pjoin, dirname
 from io import BytesIO
-from ..testing import suppress_warnings
-
-import numpy as np
+from ..testing_pytest import suppress_warnings
 
 with suppress_warnings():
     from .. import dft
 from .. import nifti1
 
-from nose import SkipTest
-from nose.tools import (assert_true, assert_false, assert_equal, assert_raises)
+import pytest
 
 # Shield optional package imports
 from ..optpkg import optional_package
 
-# setup_module will raise SkipTest if no dicom to import
 from nibabel.pydicom_compat import have_dicom
 
 PImage, have_pil, _ = optional_package('PIL.Image')
-pil_test = np.testing.dec.skipif(not have_pil, 'could not import PIL.Image')
 
 data_dir = pjoin(dirname(__file__), 'data')
 
 
-def setup_module():
-    if os.name == 'nt':
-        raise SkipTest('FUSE not available for windows, skipping dft tests')
-    if not have_dicom:
-        raise SkipTest('Need pydicom for dft tests, skipping')
-
+pytestmark = [
+    pytest.mark.skipif(os.name == 'nt',
+                       reason='FUSE not available for windows, skipping dft tests'),
+    pytest.mark.skipif(not have_dicom,
+                       reason='Need pydicom for dft tests, skipping')
+]
 
 def test_init():
     dft.clear_cache()
@@ -41,41 +34,41 @@ def test_init():
 
 def test_study():
     studies = dft.get_studies(data_dir)
-    assert_equal(len(studies), 1)
-    assert_equal(studies[0].uid,
+    assert len(studies) == 1
+    assert (studies[0].uid ==
            '1.3.12.2.1107.5.2.32.35119.30000010011408520750000000022')
-    assert_equal(studies[0].date, '20100114')
-    assert_equal(studies[0].time, '121314.000000')
-    assert_equal(studies[0].comments, 'dft study comments')
-    assert_equal(studies[0].patient_name, 'dft patient name')
-    assert_equal(studies[0].patient_id, '1234')
-    assert_equal(studies[0].patient_birth_date, '19800102')
-    assert_equal(studies[0].patient_sex, 'F')
+    assert studies[0].date == '20100114'
+    assert studies[0].time == '121314.000000'
+    assert studies[0].comments == 'dft study comments'
+    assert studies[0].patient_name == 'dft patient name'
+    assert studies[0].patient_id == '1234'
+    assert studies[0].patient_birth_date == '19800102'
+    assert studies[0].patient_sex == 'F'
 
 
 def test_series():
    studies = dft.get_studies(data_dir)
-    assert_equal(len(studies[0].series), 1)
+    assert len(studies[0].series) == 1
    ser = studies[0].series[0]
-    assert_equal(ser.uid,
+    assert (ser.uid ==
           '1.3.12.2.1107.5.2.32.35119.2010011420292594820699190.0.0.0')
-    assert_equal(ser.number, '12')
-    assert_equal(ser.description, 'CBU_DTI_64D_1A')
-    assert_equal(ser.rows, 256)
-    assert_equal(ser.columns, 256)
-    assert_equal(ser.bits_allocated, 16)
-    assert_equal(ser.bits_stored, 12)
+    assert ser.number == '12'
+    assert ser.description == 'CBU_DTI_64D_1A'
+    assert ser.rows == 256
+    assert ser.columns == 256
+    assert ser.bits_allocated == 16
+    assert ser.bits_stored == 12
 
 
 def test_storage_instances():
     studies = dft.get_studies(data_dir)
     sis = studies[0].series[0].storage_instances
-    assert_equal(len(sis), 2)
-    assert_equal(sis[0].instance_number, 1)
-    assert_equal(sis[1].instance_number, 2)
-    assert_equal(sis[0].uid,
+    assert len(sis) == 2
+    assert sis[0].instance_number == 1
+    assert sis[1].instance_number == 2
+    assert (sis[0].uid ==
           '1.3.12.2.1107.5.2.32.35119.2010011420300180088599504.0')
-    assert_equal(sis[1].uid,
+    assert (sis[1].uid ==
           '1.3.12.2.1107.5.2.32.35119.2010011420300180088599504.1')
 
 
@@ -83,17 +76,17 @@ def test_storage_instance():
     pass
 
 
-@pil_test
+@pytest.mark.skipif(not have_pil, reason='could not import PIL.Image')
 def test_png():
     studies = dft.get_studies(data_dir)
     data = studies[0].series[0].as_png()
     im = PImage.open(BytesIO(data))
-    assert_equal(im.size, (256, 256))
+    assert im.size == (256, 256)
 
 
 def test_nifti():
     studies = dft.get_studies(data_dir)
     data = studies[0].series[0].as_nifti()
-    assert_equal(len(data), 352 + 2 * 256 * 256 * 2)
+    assert len(data) == 352 + 2 * 256 * 256 * 2
     h = nifti1.Nifti1Header(data[:348])
-    assert_equal(h.get_data_shape(), (256, 256, 2))
+    assert h.get_data_shape() == (256, 256, 2)
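
The test_dft.py hunk above trades the nose-era setup_module guard for a module-level pytestmark list; pytest applies every mark in that list to each test collected from the module. A minimal runnable sketch of the same pattern, with a hypothetical module and conditions not taken from this series:

    import os

    import pytest

    # Both skipif marks apply to every test collected from this module.
    pytestmark = [
        pytest.mark.skipif(os.name == 'nt', reason='not supported on Windows'),
        pytest.mark.skipif(not os.environ.get('RUN_SLOW'),
                           reason='slow tests disabled'),
    ]


    def test_something():
        # Runs only when neither skipif condition above fires.
        assert 1 + 1 == 2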

From 4ee404b63e4a9314a3ee3e3b3b27dc26a20b0281 Mon Sep 17 00:00:00 2001
From: Dorota Jarecka
Date: Fri, 15 Nov 2019 14:41:30 -0500
Subject: [PATCH 449/689] converting more tests to pytest

---
 nibabel/tests/test_ecat.py            |  94 ++---
 nibabel/tests/test_ecat_data.py       |   7 +-
 nibabel/tests/test_endiancodes.py     |  24 +-
 nibabel/tests/test_environment.py     |  52 +--
 nibabel/tests/test_euler.py           |  86 ++--
 nibabel/tests/test_filebasedimages.py |  21 +-
 nibabel/tests/test_filehandles.py     |   4 -
 nibabel/tests/test_fileholders.py     |  45 +--
 nibabel/tests/test_filename_parser.py |  81 ++--
 nibabel/tests/test_files_interface.py |  32 +-
 nibabel/tests/test_fileslice.py       | 556 +++++++++++++------------
 nibabel/tests/test_fileutils.py       |  23 +-
 nibabel/tests/test_floating.py        | 156 ++++----
 nibabel/tests/test_funcs.py           |  42 +-
 nibabel/tests/test_h5py_compat.py     |  21 +-
 nibabel/tests/test_helpers.py         |   7 +-
 16 files changed, 614 insertions(+), 637 deletions(-)
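
This commit applies one mechanical translation across sixteen test modules: nose assertion helpers become plain assert statements, and assert_raises becomes the pytest.raises context manager. A minimal runnable sketch of the mapping, with hypothetical names (double, test_double) that are not code from the patch:

    import pytest


    def double(x):
        if x < 0:
            raise ValueError('x must be non-negative')
        return 2 * x


    def test_double():
        # nose: assert_equal(double(2), 4)
        assert double(2) == 4
        # nose: assert_true(double(1) > 0)
        assert double(1) > 0
        # nose: assert_raises(ValueError, double, -1)
        with pytest.raises(ValueError):
            double(-1)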
diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py
index a3a40b2904..b681a59b0e 100644
--- a/nibabel/tests/test_ecat.py
+++ b/nibabel/tests/test_ecat.py
@@ -17,11 +17,11 @@
                     get_frame_order, get_series_framenumbers)
 
 from unittest import TestCase
-from nose.tools import (assert_true, assert_false, assert_equal, assert_raises)
+import pytest
 
 from numpy.testing import assert_array_equal, assert_array_almost_equal
 
-from ..testing import data_path, suppress_warnings, clear_and_catch_warnings
+from ..testing_pytest import data_path, suppress_warnings, clear_and_catch_warnings
 from ..tmpdirs import InTemporaryDirectory
 
 from .test_wrapstruct import _TestWrapStructBase
@@ -35,16 +35,16 @@ class TestEcatHeader(_TestWrapStructBase):
     example_file = ecat_file
 
     def test_header_size(self):
-        assert_equal(self.header_class.template_dtype.itemsize, 512)
+        assert self.header_class.template_dtype.itemsize == 512
 
     def test_empty(self):
         hdr = self.header_class()
-        assert_true(len(hdr.binaryblock) == 512)
-        assert_true(hdr['magic_number'] == b'MATRIX72')
-        assert_true(hdr['sw_version'] == 74)
-        assert_true(hdr['num_frames'] == 0)
-        assert_true(hdr['file_type'] == 0)
-        assert_true(hdr['ecat_calibration_factor'] == 1.0)
+        assert len(hdr.binaryblock) == 512
+        assert hdr['magic_number'] == b'MATRIX72'
+        assert hdr['sw_version'] == 74
+        assert hdr['num_frames'] == 0
+        assert hdr['file_type'] == 0
+        assert hdr['ecat_calibration_factor'] == 1.0
 
     def _set_something_into_hdr(self, hdr):
         # Called from test_bytes test method.  Specific to the header data type
@@ -53,28 +53,29 @@ def _set_something_into_hdr(self, hdr):
     def test_dtype(self):
         # dtype not specified in header, only in subheaders
         hdr = self.header_class()
-        assert_raises(NotImplementedError, hdr.get_data_dtype)
+        with pytest.raises(NotImplementedError):
+            hdr.get_data_dtype()
 
     def test_header_codes(self):
         fid = open(ecat_file, 'rb')
         hdr = self.header_class()
         newhdr = hdr.from_fileobj(fid)
         fid.close()
-        assert_true(newhdr.get_filetype() == 'ECAT7_VOLUME16')
-        assert_equal(newhdr.get_patient_orient(),
+        assert newhdr.get_filetype() == 'ECAT7_VOLUME16'
+        assert (newhdr.get_patient_orient() ==
                'ECAT7_Unknown_Orientation')
 
     def test_update(self):
         hdr = self.header_class()
-        assert_true(hdr['num_frames'] == 0)
+        assert hdr['num_frames'] == 0
        hdr['num_frames'] = 2
-        assert_true(hdr['num_frames'] == 2)
+        assert hdr['num_frames'] == 2
 
     def test_from_eg_file(self):
         # Example header is big-endian
         with Opener(self.example_file) as fileobj:
             hdr = self.header_class.from_fileobj(fileobj, check=False)
-        assert_equal(hdr.endianness, '>')
+        assert hdr.endianness == '>'
 
 
 class TestEcatMlist(TestCase):
@@ -93,9 +94,9 @@ def test_mlist(self):
         mats = np.recarray(shape=(32, 4), dtype=dt, buf=dat)
         fid.close()
         # tests
-        assert_true(mats['matlist'][0, 0] + mats['matlist'][0, 3] == 31)
-        assert_true(get_frame_order(mlist)[0][0] == 0)
-        assert_true(get_frame_order(mlist)[0][1] == 16842758.0)
+        assert mats['matlist'][0, 0] + mats['matlist'][0, 3] == 31
+        assert get_frame_order(mlist)[0][0] == 0
+        assert get_frame_order(mlist)[0][1] == 16842758.0
         # test badly ordered mlist
         badordermlist = np.array([[1.68427540e+07, 3.00000000e+00,
                                    1.20350000e+04, 1.00000000e+00],
@@ -110,7 +111,7 @@ def test_mlist(self):
                                   [1.68427580e+07, 6.01680000e+04,
                                    7.22000000e+04, 1.00000000e+00]])
         with suppress_warnings():  # STORED order
-            assert_true(get_frame_order(badordermlist)[0][0] == 1)
+            assert get_frame_order(badordermlist)[0][0] == 1
 
     def test_mlist_errors(self):
         fid = open(self.example_file, 'rb')
@@ -132,17 +133,18 @@ def test_mlist_errors(self):
         with suppress_warnings():  # STORED order
             series_framenumbers = get_series_framenumbers(mlist)
         # first frame stored was actually 2nd frame acquired
-        assert_true(series_framenumbers[0] == 2)
+        assert series_framenumbers[0] == 2
        order = [series_framenumbers[x] for x in sorted(series_framenumbers)]
        # true series order is [2,1,3,4,5,6], note counting starts at 1
-        assert_true(order == [2, 1, 3, 4, 5, 6])
+        assert order == [2, 1, 3, 4, 5, 6]
        mlist[0, 0] = 0
        with suppress_warnings():
            frames_order = get_frame_order(mlist)
        neworder = [frames_order[x][0] for x in sorted(frames_order)]
-        assert_true(neworder == [1, 2, 3, 4, 5])
+        assert neworder == [1, 2, 3, 4, 5]
        with suppress_warnings():
-            assert_raises(IOError, get_series_framenumbers, mlist)
+            with pytest.raises(IOError):
+                get_series_framenumbers(mlist)
 
 
 class TestEcatSubHeader(TestCase):
@@ -155,26 +157,26 @@ class TestEcatSubHeader(TestCase):
     subhdr = subhdr_class(hdr, mlist, fid)
 
     def test_subheader_size(self):
-        assert_equal(self.subhdr_class._subhdrdtype.itemsize, 510)
+        assert self.subhdr_class._subhdrdtype.itemsize == 510
 
     def test_subheader(self):
-        assert_equal(self.subhdr.get_shape(), (10, 10, 3))
-        assert_equal(self.subhdr.get_nframes(), 1)
-        assert_equal(self.subhdr.get_nframes(),
+        assert self.subhdr.get_shape() == (10, 10, 3)
+        assert self.subhdr.get_nframes() == 1
+        assert (self.subhdr.get_nframes() ==
                len(self.subhdr.subheaders))
-        assert_equal(self.subhdr._check_affines(), True)
+        assert self.subhdr._check_affines() == True
        assert_array_almost_equal(np.diag(self.subhdr.get_frame_affine()),
                                  np.array([2.20241979, 2.20241979, 3.125, 1.]))
-        assert_equal(self.subhdr.get_zooms()[0], 2.20241978764534)
-        assert_equal(self.subhdr.get_zooms()[2], 3.125)
-        assert_equal(self.subhdr._get_data_dtype(0), np.int16)
+        assert self.subhdr.get_zooms()[0] == 2.20241978764534
+        assert self.subhdr.get_zooms()[2] == 3.125
+        assert self.subhdr._get_data_dtype(0) == np.int16
        #assert_equal(self.subhdr._get_frame_offset(), 1024)
-        assert_equal(self.subhdr._get_frame_offset(), 1536)
+        assert self.subhdr._get_frame_offset() == 1536
        dat = self.subhdr.raw_data_from_fileobj()
-        assert_equal(dat.shape, self.subhdr.get_shape())
-        assert_equal(self.subhdr.subheaders[0]['scale_factor'].item(), 1.0)
+        assert dat.shape == self.subhdr.get_shape()
+        assert self.subhdr.subheaders[0]['scale_factor'].item() == 1.0
        ecat_calib_factor = self.hdr['ecat_calibration_factor']
-        assert_equal(ecat_calib_factor, 25007614.0)
+        assert ecat_calib_factor == 25007614.0
 
 
 class TestEcatImage(TestCase):
@@ -183,9 +185,9 @@ class TestEcatImage(TestCase):
     img = image_class.load(example_file)
 
     def test_file(self):
-        assert_equal(self.img.file_map['header'].filename,
+        assert (self.img.file_map['header'].filename ==
                self.example_file)
-        assert_equal(self.img.file_map['image'].filename,
+        assert (self.img.file_map['image'].filename ==
                self.example_file)
 
     def test_save(self):
@@ -200,7 +202,7 @@ def test_save(self):
 
     def test_data(self):
         dat = self.img.get_fdata()
-        assert_equal(dat.shape, self.img.shape)
+        assert dat.shape == self.img.shape
        frame = self.img.get_frame(0)
        assert_array_equal(frame, dat[:, :, :, 0])
 
@@ -220,7 +222,7 @@ def test_array_proxy_slicing(self):
         # Test slicing of array proxy
         arr = self.img.get_fdata()
         prox = self.img.dataobj
-        assert_true(prox.is_proxy)
+        assert prox.is_proxy
        for sliceobj in slicer_samples(self.img.shape):
            assert_array_equal(arr[sliceobj], prox[sliceobj])
 
@@ -235,7 +237,7 @@ def test_isolation(self):
         img = img_klass(arr, aff, hdr, sub_hdr, mlist)
         assert_array_equal(img.affine, aff)
         aff[0, 0] = 99
-        assert_false(np.all(img.affine == aff))
+        assert not np.all(img.affine == aff)
 
     def test_float_affine(self):
         # Check affines get converted to float
@@ -246,9 +248,9 @@ def test_float_affine(self):
                self.img.get_subheaders(),
                self.img.get_mlist())
        img = img_klass(arr, aff.astype(np.float32), hdr, sub_hdr, mlist)
-        assert_equal(img.get_affine().dtype, np.dtype(np.float64))
+        assert img.get_affine().dtype == np.dtype(np.float64)
        img = img_klass(arr, aff.astype(np.int16), hdr, sub_hdr, mlist)
-        assert_equal(img.get_affine().dtype, np.dtype(np.float64))
+        assert img.get_affine().dtype == np.dtype(np.float64)
 
     def test_data_regression(self):
         # Test whether data read has changed since 1.3.0
@@ -257,8 +259,8 @@ def test_data_regression(self):
                    min=1125342630.0,
                    mean=117907565661.46666)
        data = self.img.get_fdata()
-        assert_equal(data.max(), vals['max'])
-        assert_equal(data.min(), vals['min'])
+        assert data.max() == vals['max']
+        assert data.min() == vals['min']
        assert_array_almost_equal(data.mean(), vals['mean'])
 
     def test_mlist_regression(self):
@@ -273,8 +275,8 @@ def test_from_filespec_deprecation():
         warnings.simplefilter('always', DeprecationWarning)
         # No warning for standard load
         img_loaded = EcatImage.load(ecat_file)
-        assert_equal(len(w), 0)
+        assert len(w) == 0
        # Warning for from_filespec
        img_speced = EcatImage.from_filespec(ecat_file)
-        assert_equal(len(w), 1)
+        assert len(w) == 1
        assert_array_equal(img_loaded.get_fdata(),
                           img_speced.get_fdata())
diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py
index 4b187bf855..1accd01a14 100644
--- a/nibabel/tests/test_ecat_data.py
+++ b/nibabel/tests/test_ecat_data.py
@@ -17,7 +17,6 @@
 from .nibabel_data import get_nibabel_data, needs_nibabel_data
 from ..ecat import load
 
-from nose.tools import assert_equal
 from numpy.testing import (assert_array_equal, assert_almost_equal)
 
 ECAT_TEST_PATH = pjoin(get_nibabel_data(), 'nipy-ecattest')
@@ -40,11 +39,11 @@ class TestNegatives(object):
     def test_load(self):
         # Check highest level load of minc works
         img = self.opener(self.example_params['fname'])
-        assert_equal(img.shape, self.example_params['shape'])
-        assert_equal(img.get_data_dtype(0).type, self.example_params['type'])
+        assert img.shape == self.example_params['shape']
+        assert img.get_data_dtype(0).type == self.example_params['type']
        # Check correspondence of data and recorded shape
        data = img.get_fdata()
-        assert_equal(data.shape, self.example_params['shape'])
+        assert data.shape == self.example_params['shape']
        # min, max, mean values from given parameters
        assert_almost_equal(data.min(), self.example_params['min'], 4)
        assert_almost_equal(data.max(), self.example_params['max'], 4)
diff --git a/nibabel/tests/test_endiancodes.py b/nibabel/tests/test_endiancodes.py
index 805de0d572..94c9ea0344 100644
--- a/nibabel/tests/test_endiancodes.py
+++ b/nibabel/tests/test_endiancodes.py
@@ -10,31 +10,27 @@
 
 import sys
 
-
-from nose.tools import assert_equal
-from nose.tools import assert_true
-
 from ..volumeutils import (endian_codes, native_code, swapped_code)
 
 
 def test_native_swapped():
     native_is_le = sys.byteorder == 'little'
     if native_is_le:
-        assert_equal((native_code, swapped_code), ('<', '>'))
+        assert (native_code, swapped_code) == ('<', '>')
     else:
-        assert_equal((native_code, swapped_code), ('>', '<'))
+        assert (native_code, swapped_code) == ('>', '<')
 
 
 def test_to_numpy():
     if sys.byteorder == 'little':
-        yield assert_true, endian_codes['native'] == '<'
-        yield assert_true, endian_codes['swapped'] == '>'
+        assert endian_codes['native'] == '<'
+        assert endian_codes['swapped'] == '>'
     else:
-        yield assert_true, endian_codes['native'] == '>'
-        yield assert_true, endian_codes['swapped'] == '<'
-    yield assert_true, endian_codes['native'] == endian_codes['=']
-    yield assert_true, endian_codes['big'] == '>'
+        assert endian_codes['native'] == '>'
+        assert endian_codes['swapped'] == '<'
+    assert endian_codes['native'] == endian_codes['=']
+    assert endian_codes['big'] == '>'
     for code in ('little', '<', 'l', 'L', 'le'):
-        yield assert_true, endian_codes[code] == '<'
+        assert endian_codes[code] == '<'
     for code in ('big', '>', 'b', 'B', 'be'):
-        yield assert_true, endian_codes[code] == '>'
+        assert endian_codes[code] == '>'
diff --git a/nibabel/tests/test_environment.py b/nibabel/tests/test_environment.py
index 7b02ea866f..6dc127c95f 100644
--- a/nibabel/tests/test_environment.py
+++ b/nibabel/tests/test_environment.py
@@ -8,52 +8,44 @@
 
 from .. import environment as nibe
 
-from numpy.testing import (assert_array_almost_equal,
-                           assert_array_equal)
+import pytest
 
-from nose.tools import assert_equal
-
-from nose import with_setup
-
-GIVEN_ENV = {}
 DATA_KEY = 'NIPY_DATA_PATH'
 USER_KEY = 'NIPY_USER_DIR'
 
 
-def setup_environment():
+@pytest.fixture()
+def with_environment(request):
     """Setup test environment for some functions that are tested
    in this module. In particular this functions stores attributes
    and other things that we need to stub in some test functions.
    This needs to be done on a function level and not module level because
    each test function needs a pristine environment.
    """
+    GIVEN_ENV = {}
     GIVEN_ENV['env'] = env.copy()
 
-def teardown_environment():
-    """Restore things that were remembered by the setup_environment function
-    """
-    orig_env = GIVEN_ENV['env']
-    # Pull keys out into list to avoid altering dictionary during iteration,
-    # causing python 3 error
-    for key in list(env.keys()):
-        if key not in orig_env:
-            del env[key]
-    env.update(orig_env)
-
+    def teardown_environment():
+        """Restore things that were remembered by the setup_environment function
+        """
+        orig_env = GIVEN_ENV['env']
+        # Pull keys out into list to avoid altering dictionary during iteration,
+        # causing python 3 error
+        for key in list(env.keys()):
+            if key not in orig_env:
+                del env[key]
+        env.update(orig_env)
 
-# decorator to use setup, teardown environment
-with_environment = with_setup(setup_environment, teardown_environment)
+    request.addfinalizer(teardown_environment)
 
 
 def test_nipy_home():
     # Test logic for nipy home directory
-    assert_equal(nibe.get_home_dir(), os.path.expanduser('~'))
+    assert nibe.get_home_dir() == os.path.expanduser('~')
 
 
-@with_environment
-def test_user_dir():
+def test_user_dir(with_environment):
     if USER_KEY in env:
         del env[USER_KEY]
     home_dir = nibe.get_home_dir()
@@ -61,16 +53,16 @@ def test_user_dir():
         exp = pjoin(home_dir, '.nipy')
     else:
         exp = pjoin(home_dir, '_nipy')
-    assert_equal(exp, nibe.get_nipy_user_dir())
+    assert exp == nibe.get_nipy_user_dir()
     env[USER_KEY] = '/a/path'
-    assert_equal(abspath('/a/path'), nibe.get_nipy_user_dir())
+    assert abspath('/a/path') == nibe.get_nipy_user_dir()
 
 
 def test_sys_dir():
     sys_dir = nibe.get_nipy_system_dir()
     if os.name == 'nt':
-        assert_equal(sys_dir, r'C:\etc\nipy')
+        assert sys_dir == r'C:\etc\nipy'
     elif os.name == 'posix':
-        assert_equal(sys_dir, r'/etc/nipy')
+        assert sys_dir == r'/etc/nipy'
     else:
-        assert_equal(sys_dir, None)
+        assert sys_dir == None
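
The with_environment fixture above registers its cleanup through request.addfinalizer, mirroring nose's with_setup pairing. The same save-and-restore can be written as a yield-style fixture, where code after the yield runs as teardown. A sketch under that assumption, using os.environ directly in place of the module's env alias:

    import os

    import pytest


    @pytest.fixture
    def with_environment():
        # Everything before the yield is setup; everything after it runs
        # as teardown once the test using this fixture has finished.
        saved = os.environ.copy()
        yield
        for key in list(os.environ.keys()):
            if key not in saved:
                del os.environ[key]
        os.environ.update(saved)


    def test_modifies_environment(with_environment):
        # Hypothetical usage: the key is removed again after the test.
        os.environ['HYPOTHETICAL_KEY'] = 'temporary'
        assert os.environ['HYPOTHETICAL_KEY'] == 'temporary'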
diff --git a/nibabel/tests/test_euler.py b/nibabel/tests/test_euler.py
index 0d7027222f..915e65e552 100644
--- a/nibabel/tests/test_euler.py
+++ b/nibabel/tests/test_euler.py
@@ -15,9 +15,7 @@
 from .. import eulerangles as nea
 from .. import quaternions as nq
 
-from nose.tools import assert_false
-from nose.tools import assert_true
-
+import pytest
 from numpy.testing import assert_array_equal, assert_array_almost_equal
 
 FLOAT_EPS = np.finfo(np.float).eps
@@ -90,36 +88,38 @@ def test_basic_euler():
     M2 = nea.euler2mat(0, yr)
     M3 = nea.euler2mat(0, 0, xr)
     # which are all valid rotation matrices
-    yield assert_true, is_valid_rotation(M)
-    yield assert_true, is_valid_rotation(M1)
-    yield assert_true, is_valid_rotation(M2)
-    yield assert_true, is_valid_rotation(M3)
+    assert is_valid_rotation(M)
+    assert is_valid_rotation(M1)
+    assert is_valid_rotation(M2)
+    assert is_valid_rotation(M3)
     # Full matrix is composition of three individual matrices
-    yield assert_true, np.allclose(M, np.dot(M3, np.dot(M2, M1)))
+    assert np.allclose(M, np.dot(M3, np.dot(M2, M1)))
     # Rotations can be specified with named args, default 0
-    yield assert_true, np.all(nea.euler2mat(zr) == nea.euler2mat(z=zr))
-    yield assert_true, np.all(nea.euler2mat(0, yr) == nea.euler2mat(y=yr))
-    yield assert_true, np.all(nea.euler2mat(0, 0, xr) == nea.euler2mat(x=xr))
+    assert np.all(nea.euler2mat(zr) == nea.euler2mat(z=zr))
+    assert np.all(nea.euler2mat(0, yr) == nea.euler2mat(y=yr))
+    assert np.all(nea.euler2mat(0, 0, xr) == nea.euler2mat(x=xr))
     # Applying an opposite rotation same as inverse (the inverse is
     # the same as the transpose, but just for clarity)
-    yield assert_true, np.allclose(nea.euler2mat(x=-xr),
+    assert np.allclose(nea.euler2mat(x=-xr),
                       np.linalg.inv(nea.euler2mat(x=xr)))
 
 
-def test_euler_mat():
+def test_euler_mat_1():
     M = nea.euler2mat()
-    yield assert_array_equal, M, np.eye(3)
-    for x, y, z in eg_rots:
-        M1 = nea.euler2mat(z, y, x)
-        M2 = sympy_euler(z, y, x)
-        yield assert_array_almost_equal, M1, M2
-        M3 = np.dot(x_only(x), np.dot(y_only(y), z_only(z)))
-        yield assert_array_almost_equal, M1, M3
-        zp, yp, xp = nea.mat2euler(M1)
-        # The parameters may not be the same as input, but they give the
-        # same rotation matrix
-        M4 = nea.euler2mat(zp, yp, xp)
-        yield assert_array_almost_equal, M1, M4
+    assert_array_equal(M, np.eye(3))
+
+
+@pytest.mark.parametrize("x, y, z", eg_rots)
+def test_euler_mat_2(x, y, z):
+    M1 = nea.euler2mat(z, y, x)
+    M2 = sympy_euler(z, y, x)
+    assert_array_almost_equal(M1, M2)
+    M3 = np.dot(x_only(x), np.dot(y_only(y), z_only(z)))
+    assert_array_almost_equal(M1, M3)
+    zp, yp, xp = nea.mat2euler(M1)
+    # The parameters may not be the same as input, but they give the
+    # same rotation matrix
+    M4 = nea.euler2mat(zp, yp, xp)
+    assert_array_almost_equal(M1, M4)
 
 
 def sympy_euler2quat(z=0, y=0, x=0):
@@ -148,27 +148,27 @@ def test_euler_instability():
     M = nea.euler2mat(*zyx)
     # Round trip
     M_back = nea.euler2mat(*nea.mat2euler(M))
-    yield assert_true, np.allclose(M, M_back)
+    assert np.allclose(M, M_back)
     # disturb matrix slightly
     M_e = M - FLOAT_EPS
     # round trip to test - OK
     M_e_back = nea.euler2mat(*nea.mat2euler(M_e))
-    yield assert_true, np.allclose(M_e, M_e_back)
+    assert np.allclose(M_e, M_e_back)
     # not so with crude routine
     M_e_back = nea.euler2mat(*crude_mat2euler(M_e))
-    yield assert_false, np.allclose(M_e, M_e_back)
-
-
-def test_quats():
-    for x, y, z in eg_rots:
-        M1 = nea.euler2mat(z, y, x)
-        quatM = nq.mat2quat(M1)
-        quat = nea.euler2quat(z, y, x)
-        yield nq.nearly_equivalent, quatM, quat
-        quatS = sympy_euler2quat(z, y, x)
-        yield nq.nearly_equivalent, quat, quatS
-        zp, yp, xp = nea.quat2euler(quat)
-        # The parameters may not be the same as input, but they give the
-        # same rotation matrix
-        M2 = nea.euler2mat(zp, yp, xp)
-        yield assert_array_almost_equal, M1, M2
+    assert not np.allclose(M_e, M_e_back)
+
+
+@pytest.mark.parametrize("x, y, z", eg_rots)
+def test_quats(x, y, z):
+    M1 = nea.euler2mat(z, y, x)
+    quatM = nq.mat2quat(M1)
+    quat = nea.euler2quat(z, y, x)
+    assert nq.nearly_equivalent(quatM, quat)
+    quatS = sympy_euler2quat(z, y, x)
+    assert nq.nearly_equivalent(quat, quatS)
+    zp, yp, xp = nea.quat2euler(quat)
+    # The parameters may not be the same as input, but they give the
+    # same rotation matrix
+    M2 = nea.euler2mat(zp, yp, xp)
+    assert_array_almost_equal(M1, M2)
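
The test_euler.py conversion above replaces nose yield tests with @pytest.mark.parametrize, which runs and reports each angle triple as a separate test case. A minimal self-contained sketch of the pattern, with hypothetical data standing in for eg_rots:

    import math

    import pytest

    # Hypothetical stand-in for the module's eg_rots table of rotation angles.
    ANGLES = [(0.0, 0.0, 0.0), (0.1, -0.2, 0.3)]


    @pytest.mark.parametrize("x, y, z", ANGLES)
    def test_angle_identity(x, y, z):
        # Each (x, y, z) tuple runs as a separately reported test.
        assert math.isclose(math.sin(x) ** 2 + math.cos(x) ** 2, 1.0)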
diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py
index a9c5668508..efac76a65a 100644
--- a/nibabel/tests/test_filebasedimages.py
+++ b/nibabel/tests/test_filebasedimages.py
@@ -10,9 +10,6 @@
 
 from .test_image_api import GenericImageAPI, SerializeMixin
 
-from nose.tools import (assert_true, assert_false, assert_equal,
-                        assert_not_equal)
-
 
 class FBNumpyImage(FileBasedImage):
     header_class = FileBasedHeader
@@ -113,20 +110,20 @@ def __init__(self, seq=None):
     in_list = [1, 3, 2]
     hdr = H(in_list)
     hdr_c = hdr.copy()
-    assert_equal(hdr_c.a_list, hdr.a_list)
+    assert hdr_c.a_list == hdr.a_list
     # Copy is independent of original
     hdr_c.a_list[0] = 99
-    assert_not_equal(hdr_c.a_list, hdr.a_list)
+    assert hdr_c.a_list != hdr.a_list
     # From header does a copy
     hdr2 = H.from_header(hdr)
-    assert_true(isinstance(hdr2, H))
-    assert_equal(hdr2.a_list, hdr.a_list)
+    assert isinstance(hdr2, H)
+    assert hdr2.a_list == hdr.a_list
     hdr2.a_list[0] = 42
-    assert_not_equal(hdr2.a_list, hdr.a_list)
+    assert hdr2.a_list != hdr.a_list
    # Default header input to from_header gives new empty header
     hdr3 = H.from_header()
-    assert_true(isinstance(hdr3, H))
-    assert_equal(hdr3.a_list, [])
+    assert isinstance(hdr3, H)
+    assert hdr3.a_list == []
     hdr4 = H.from_header(None)
-    assert_true(isinstance(hdr4, H))
-    assert_equal(hdr4.a_list, [])
+    assert isinstance(hdr4, H)
+    assert hdr4.a_list == []
diff --git a/nibabel/tests/test_filehandles.py b/nibabel/tests/test_filehandles.py
index 1533b7c4f8..23ae573a70 100644
--- a/nibabel/tests/test_filehandles.py
+++ b/nibabel/tests/test_filehandles.py
@@ -20,10 +20,6 @@
 from ..loadsave import load, save
 from ..nifti1 import Nifti1Image
 
-from numpy.testing import (assert_array_almost_equal,
-                           assert_array_equal)
-
-
 def test_multiload():
     # Make a tiny image, save, load many times.  If we are leaking filehandles,
diff --git a/nibabel/tests/test_fileholders.py b/nibabel/tests/test_fileholders.py
index b28727a47e..e31a6efcbc 100644
--- a/nibabel/tests/test_fileholders.py
+++ b/nibabel/tests/test_fileholders.py
@@ -6,56 +6,49 @@
 
 from ..fileholders import FileHolder
 
-from numpy.testing import (assert_array_almost_equal,
-                           assert_array_equal)
-
-from nose.tools import assert_equal
-from nose.tools import assert_false
-from nose.tools import assert_true
-
 
 def test_init():
     fh = FileHolder('a_fname')
-    assert_equal(fh.filename, 'a_fname')
-    assert_true(fh.fileobj is None)
-    assert_equal(fh.pos, 0)
+    assert fh.filename == 'a_fname'
+    assert fh.fileobj is None
+    assert fh.pos == 0
     sio0 = BytesIO()
     fh = FileHolder('a_test', sio0)
-    assert_equal(fh.filename, 'a_test')
-    assert_true(fh.fileobj is sio0)
-    assert_equal(fh.pos, 0)
+    assert fh.filename == 'a_test'
+    assert fh.fileobj is sio0
+    assert fh.pos == 0
     fh = FileHolder('a_test_2', sio0, 3)
-    assert_equal(fh.filename, 'a_test_2')
-    assert_true(fh.fileobj is sio0)
-    assert_equal(fh.pos, 3)
+    assert fh.filename == 'a_test_2'
+    assert fh.fileobj is sio0
+    assert fh.pos == 3
 
 
 def test_same_file_as():
     fh = FileHolder('a_fname')
-    assert_true(fh.same_file_as(fh))
+    assert fh.same_file_as(fh)
     fh2 = FileHolder('a_test')
-    assert_false(fh.same_file_as(fh2))
+    assert not fh.same_file_as(fh2)
     sio0 = BytesIO()
     fh3 = FileHolder('a_fname', sio0)
     fh4 = FileHolder('a_fname', sio0)
-    assert_true(fh3.same_file_as(fh4))
-    assert_false(fh3.same_file_as(fh))
+    assert fh3.same_file_as(fh4)
+    assert not fh3.same_file_as(fh)
     fh5 = FileHolder(fileobj=sio0)
     fh6 = FileHolder(fileobj=sio0)
-    assert_true(fh5.same_file_as(fh6))
+    assert fh5.same_file_as(fh6)
     # Not if the filename is the same
-    assert_false(fh5.same_file_as(fh3))
+    assert not fh5.same_file_as(fh3)
     # pos doesn't matter
     fh4_again = FileHolder('a_fname', sio0, pos=4)
-    assert_true(fh3.same_file_as(fh4_again))
+    assert fh3.same_file_as(fh4_again)
 
 
 def test_file_like():
     # Test returning file object or filename
     fh = FileHolder('a_fname')
-    assert_equal(fh.file_like, 'a_fname')
+    assert fh.file_like == 'a_fname'
     bio = BytesIO()
     fh = FileHolder(fileobj=bio)
-    assert_true(fh.file_like is bio)
+    assert fh.file_like is bio
     fh = FileHolder('a_fname', fileobj=bio)
-    assert_true(fh.file_like is bio)
+    assert fh.file_like is bio
diff --git a/nibabel/tests/test_filename_parser.py b/nibabel/tests/test_filename_parser.py
index f7317ac183..22178a4349 100644
--- a/nibabel/tests/test_filename_parser.py
+++ b/nibabel/tests/test_filename_parser.py
@@ -11,58 +11,53 @@
 from ..filename_parser import (types_filenames, TypesFilenamesError,
                                parse_filename, splitext_addext)
 
-from nose.tools import (assert_equal, assert_true, assert_false,
-                        assert_raises)
+import pytest
 
 
 def test_filenames():
     types_exts = (('image', '.img'), ('header', '.hdr'))
     for t_fname in ('test.img', 'test.hdr', 'test', 'test.'):
         tfns = types_filenames(t_fname, types_exts)
-        assert_equal(tfns,
+        assert (tfns ==
                {'image': 'test.img',
                 'header': 'test.hdr'})
     # enforcing extensions raises an error for bad extension
-    assert_raises(TypesFilenamesError,
-                  types_filenames,
-                  'test.funny',
-                  types_exts)
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.funny', types_exts)
     # If not enforcing extensions, it does the best job it can,
     # assuming the passed filename is for the first type (in this case
     # 'image')
     tfns = types_filenames('test.funny', types_exts,
                            enforce_extensions=False)
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr',
           'image': 'test.funny'})
    # .gz and .bz2 suffixes to extensions, by default, are removed
     # before extension checking etc, and then put back onto every
     # returned filename.
     tfns = types_filenames('test.img.gz', types_exts)
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr.gz',
           'image': 'test.img.gz'})
     tfns = types_filenames('test.img.bz2', types_exts)
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr.bz2',
           'image': 'test.img.bz2'})
     # of course, if we don't know about e.g. gz, and enforce_extensions
     # is on, we get an error
-    assert_raises(TypesFilenamesError,
-                  types_filenames,
-                  'test.img.gz',
-                  types_exts, ())
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.img.gz', types_exts, ())
     # if we don't know about .gz extension, and not enforcing, then we
     # get something a bit odd
     tfns = types_filenames('test.img.gz', types_exts,
                            trailing_suffixes=(),
                            enforce_extensions=False)
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.img.hdr',
           'image': 'test.img.gz'})
     # the suffixes we remove and replaces can be any suffixes.
     tfns = types_filenames('test.img.bzr', types_exts, ('.bzr',))
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr.bzr',
           'image': 'test.img.bzr'})
     # If we specifically pass the remove / replace suffixes, then we
@@ -71,37 +66,33 @@ def test_filenames():
     tfns = types_filenames('test.img.bzr', types_exts,
                            trailing_suffixes=('.bzr',),
                            enforce_extensions=False)
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr.bzr',
           'image': 'test.img.bzr'})
     # but, just .gz or .bz2 as extension gives an error, if enforcing is on
-    assert_raises(TypesFilenamesError,
-                  types_filenames,
-                  'test.gz',
-                  types_exts)
-    assert_raises(TypesFilenamesError,
-                  types_filenames,
-                  'test.bz2',
-                  types_exts)
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.gz', types_exts)
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.bz2', types_exts)
     # if enforcing is off, it tries to work out what the other files
     # should be assuming the passed filename is of the first input type
     tfns = types_filenames('test.gz', types_exts,
                            enforce_extensions=False)
-    assert_equal(tfns,
+    assert (tfns ==
           {'image': 'test.gz',
           'header': 'test.hdr.gz'})
     # case (in)sensitivity, and effect of uppercase, lowercase
     tfns = types_filenames('test.IMG', types_exts)
-    assert_equal(tfns,
+    assert (tfns ==
           {'image': 'test.IMG',
           'header': 'test.HDR'})
     tfns = types_filenames('test.img',
                            (('image', '.IMG'), ('header', '.HDR')))
-    assert_equal(tfns,
+    assert (tfns ==
           {'header': 'test.hdr',
           'image': 'test.img'})
     tfns = types_filenames('test.IMG.Gz', types_exts)
-    assert_equal(tfns,
+    assert (tfns ==
           {'image': 'test.IMG.Gz',
           'header': 'test.HDR.Gz'})
 
 
@@ -121,52 +112,52 @@ def test_parse_filename():
     for inps, exps in exp_in_outs:
         pth, sufs = inps
         res = parse_filename(pth, types_exts, sufs)
-        assert_equal(res, exps)
+        assert res == exps
        upth = pth.upper()
        uexps = (exps[0].upper(), exps[1].upper(),
                 exps[2].upper() if exps[2] else None,
                 exps[3])
        res = parse_filename(upth, types_exts, sufs)
-        assert_equal(res, uexps)
+        assert res == uexps
     # test case sensitivity
     res = parse_filename('/path/fnameext2.GZ',
                          types_exts, ('.gz',), False)  # case insensitive again
-    assert_equal(res, ('/path/fname', 'ext2', '.GZ', 't2'))
+    assert res == ('/path/fname', 'ext2', '.GZ', 't2')
     res = parse_filename('/path/fnameext2.GZ',
                          types_exts, ('.gz',), True)  # case sensitive
-    assert_equal(res, ('/path/fnameext2', '.GZ', None, None))
+    assert res == ('/path/fnameext2', '.GZ', None, None)
    res = parse_filename('/path/fnameEXT2.gz',
                         types_exts, ('.gz',), False)  # case insensitive
-    assert_equal(res, ('/path/fname', 'EXT2', '.gz', 't2'))
+    assert res == ('/path/fname', 'EXT2', '.gz', 't2')
     res = parse_filename('/path/fnameEXT2.gz',
                          types_exts, ('.gz',), True)  # case sensitive
-    assert_equal(res, ('/path/fnameEXT2', '', '.gz', None))
+    assert res == ('/path/fnameEXT2', '', '.gz', None)
 
 
 def test_splitext_addext():
     res = splitext_addext('fname.ext.gz')
-    assert_equal(res, ('fname', '.ext', '.gz'))
+    assert res == ('fname', '.ext', '.gz')
     res = splitext_addext('fname.ext')
-    assert_equal(res, ('fname', '.ext', ''))
+    assert res == ('fname', '.ext', '')
     res = splitext_addext('fname.ext.foo', ('.foo', '.bar'))
-    assert_equal(res, ('fname', '.ext', '.foo'))
+    assert res == ('fname', '.ext', '.foo')
     res = splitext_addext('fname.ext.FOO', ('.foo', '.bar'))
-    assert_equal(res, ('fname', '.ext', '.FOO'))
+    assert res == ('fname', '.ext', '.FOO')
     # case sensitive
     res = splitext_addext('fname.ext.FOO', ('.foo', '.bar'), True)
-    assert_equal(res, ('fname.ext', '.FOO', ''))
+    assert res == ('fname.ext', '.FOO', '')
     # edge cases
     res = splitext_addext('.nii')
-    assert_equal(res, ('', '.nii', ''))
+    assert res == ('', '.nii', '')
     res = splitext_addext('...nii')
-    assert_equal(res, ('..', '.nii', ''))
+    assert res == ('..', '.nii', '')
     res = splitext_addext('.')
-    assert_equal(res, ('.', '', ''))
+    assert res == ('.', '', '')
     res = splitext_addext('..')
-    assert_equal(res, ('..', '', ''))
+    assert res == ('..', '', '')
     res = splitext_addext('...')
-    assert_equal(res, ('...', '', ''))
+    assert res == ('...', '', '')
diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py
index 1994741a1a..a75484159e 100644
--- a/nibabel/tests/test_files_interface.py
+++ b/nibabel/tests/test_files_interface.py
@@ -17,10 +17,8 @@
 from ..fileholders import FileHolderError
 from ..spatialimages import SpatialImage
 
-from nose.tools import (assert_true, assert_false, assert_equal, assert_raises)
-
 from numpy.testing import assert_array_equal
-
+import pytest
 
 def test_files_spatialimages():
     # test files creation in image classes
@@ -31,9 +29,9 @@ def test_files_spatialimages():
     for klass in klasses:
         file_map = klass.make_file_map()
         for key, value in file_map.items():
-            assert_equal(value.filename, None)
-            assert_equal(value.fileobj, None)
-            assert_equal(value.pos, 0)
+            assert value.filename == None
+            assert value.fileobj == None
+            assert value.pos == 0
        # If we can't create new images in memory without loading, bail here
        if not klass.makeable:
            continue
@@ -44,9 +42,9 @@ def test_files_spatialimages():
         else:
             img = klass(arr, aff)
         for key, value in img.file_map.items():
-            assert_equal(value.filename, None)
-            assert_equal(value.fileobj, None)
-            assert_equal(value.pos, 0)
+            assert value.filename == None
+            assert value.fileobj == None
+            assert value.pos == 0
 
 
 def test_files_interface():
@@ -56,15 +54,16 @@ def test_files_interface():
     img = Nifti1Image(arr, aff)
     # single image
     img.set_filename('test')
-    assert_equal(img.get_filename(), 'test.nii')
-    assert_equal(img.file_map['image'].filename, 'test.nii')
-    assert_raises(KeyError, img.file_map.__getitem__, 'header')
+    assert img.get_filename() == 'test.nii'
+    assert img.file_map['image'].filename == 'test.nii'
+    with pytest.raises(KeyError):
+        img.file_map.__getitem__('header')
     # pair - note new class
     img = Nifti1Pair(arr, aff)
     img.set_filename('test')
-    assert_equal(img.get_filename(), 'test.img')
-    assert_equal(img.file_map['image'].filename, 'test.img')
-    assert_equal(img.file_map['header'].filename, 'test.hdr')
+    assert img.get_filename() == 'test.img'
+    assert img.file_map['image'].filename == 'test.img'
+    assert img.file_map['header'].filename == 'test.hdr'
     # fileobjs - single image
     img = Nifti1Image(arr, aff)
     img.file_map['image'].fileobj = BytesIO()
@@ -76,7 +75,8 @@ def test_files_interface():
     img = Nifti1Pair(arr, aff)
     img.file_map['image'].fileobj = BytesIO()
     # no header yet
-    assert_raises(FileHolderError, img.to_file_map)
+    with pytest.raises(FileHolderError):
+        img.to_file_map()
     img.file_map['header'].fileobj = BytesIO()
     img.to_file_map()  # saves to files
     img2 = Nifti1Pair.from_file_map(img.file_map)
diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py
index 19735200eb..5c80ae01b5 100644
--- a/nibabel/tests/test_fileslice.py
+++ b/nibabel/tests/test_fileslice.py
@@ -1,11 +1,9 @@
 """ Test slicing of file-like objects """
 
-import sys
 from io import BytesIO
 from itertools import product
 from functools import partial
-from distutils.version import LooseVersion
 from threading import Thread, Lock
 import time
 
@@ -18,10 +16,7 @@
                         calc_slicedefs, _simple_fileslice, slice2outax,
                         strided_scalar)
 
-from nose.tools import assert_equal
-from nose.tools import assert_false
-from nose.tools import assert_raises
-
+import pytest
 from numpy.testing import assert_array_equal
 
 
@@ -34,7 +29,7 @@ def _check_slice(sliceobj):
     # Check if this is a view
     a[:] = 99
     b_is_view = np.all(b == 99)
-    assert_equal(not is_fancy(sliceobj), b_is_view)
+    assert (not is_fancy(sliceobj)) == b_is_view
 
 
 def test_is_fancy():
@@ -48,11 +43,11 @@ def test_is_fancy():
             if maybe_bad and slice1 is Ellipsis:
                 continue
             _check_slice((slice0, slice1))
-    assert_false(is_fancy((None,)))
-    assert_false(is_fancy((None, 1)))
-    assert_false(is_fancy((1, None)))
+    assert not is_fancy((None,))
+    assert not is_fancy((None, 1))
+    assert not is_fancy((1, None))
     # Check that actual False is returned (rather than falsey)
-    assert_equal(is_fancy(1), False)
+    assert is_fancy(1) == False
 
 
 def test_canonical_slicers():
    # Check transformation of sliceobj into canonical form
    slicers = (slice(None),
               slice(9),
               slice(0, 9),
               slice(1, 10),
               slice(1, 10, 2),
               2)
@@ -65,90 +60,96 @@ def test_canonical_slicers():
     shape = (10, 10)
     for slice0 in slicers:
-        assert_equal(canonical_slicers((slice0,), shape), (slice0, slice(None)))
+        assert canonical_slicers((slice0,), shape) == (slice0, slice(None))
        for slice1 in slicers:
            sliceobj = (slice0, slice1)
-            assert_equal(canonical_slicers(sliceobj, shape), sliceobj)
-            assert_equal(canonical_slicers(sliceobj, shape + (2, 3, 4)),
+            assert canonical_slicers(sliceobj, shape) == sliceobj
+            assert (canonical_slicers(sliceobj, shape + (2, 3, 4)) ==
                    sliceobj + (slice(None),) * 3)
-            assert_equal(canonical_slicers(sliceobj * 3, shape * 3),
+            assert (canonical_slicers(sliceobj * 3, shape * 3) ==
                    sliceobj * 3)
            # Check None passes through
-            assert_equal(canonical_slicers(sliceobj + (None,), shape),
+            assert (canonical_slicers(sliceobj + (None,), shape) ==
                    sliceobj + (None,))
-            assert_equal(canonical_slicers((None,) + sliceobj, shape),
+            assert (canonical_slicers((None,) + sliceobj, shape) ==
                    (None,) + sliceobj)
-            assert_equal(canonical_slicers((None,) + sliceobj + (None,), shape),
+            assert (canonical_slicers((None,) + sliceobj + (None,), shape) ==
                    (None,) + sliceobj + (None,))
     # Check Ellipsis
-    assert_equal(canonical_slicers((Ellipsis,), shape),
+    assert (canonical_slicers((Ellipsis,), shape) ==
           (slice(None), slice(None)))
-    assert_equal(canonical_slicers((Ellipsis, None), shape),
+    assert (canonical_slicers((Ellipsis, None), shape) ==
          (slice(None), slice(None), None))
-    assert_equal(canonical_slicers((Ellipsis, 1), shape),
+    assert (canonical_slicers((Ellipsis, 1), shape) ==
          (slice(None), 1))
-    assert_equal(canonical_slicers((1, Ellipsis), shape),
+    assert (canonical_slicers((1, Ellipsis), shape) ==
         (1, slice(None)))
     # Ellipsis at end does nothing
-    assert_equal(canonical_slicers((1, 1, Ellipsis), shape),
+    assert (canonical_slicers((1, 1, Ellipsis), shape) ==
        (1, 1))
-    assert_equal(canonical_slicers((1, Ellipsis, 2), (10, 1, 2, 3, 11)),
+    assert (canonical_slicers((1, Ellipsis, 2), (10, 1, 2, 3, 11)) ==
        (1, slice(None), slice(None), slice(None), 2))
-    assert_raises(ValueError,
-                  canonical_slicers, (Ellipsis, 1, Ellipsis), (2, 3, 4, 5))
+    with pytest.raises(ValueError):
+        canonical_slicers((Ellipsis, 1, Ellipsis), (2, 3, 4, 5))
     # Check full slices get expanded
     for slice0 in (slice(10), slice(0, 10), slice(0, 10, 1)):
-        assert_equal(canonical_slicers((slice0, 1), shape),
+        assert (canonical_slicers((slice0, 1), shape) ==
              (slice(None), 1))
     for slice0 in (slice(10), slice(0, 10), slice(0, 10, 1)):
-        assert_equal(canonical_slicers((slice0, 1), shape),
+        assert (canonical_slicers((slice0, 1), shape) ==
             (slice(None), 1))
-        assert_equal(canonical_slicers((1, slice0), shape),
+        assert (canonical_slicers((1, slice0), shape) ==
            (1, slice(None)))
     # Check ints etc get parsed through to tuples
-    assert_equal(canonical_slicers(1, shape), (1, slice(None)))
-    assert_equal(canonical_slicers(slice(None), shape),
+    assert canonical_slicers(1, shape) == (1, slice(None))
+    assert (canonical_slicers(slice(None), shape) ==
         (slice(None), slice(None)))
     # Check fancy indexing raises error
-    assert_raises(ValueError, canonical_slicers, (np.array(1), 1), shape)
-    assert_raises(ValueError, canonical_slicers, (1, np.array(1)), shape)
+    with pytest.raises(ValueError):
+        canonical_slicers((np.array(1), 1), shape)
+    with pytest.raises(ValueError):
+        canonical_slicers((1, np.array(1)), shape)
     # Check out of range integer raises error
-    assert_raises(ValueError, canonical_slicers, (10,), shape)
-    assert_raises(ValueError, canonical_slicers, (1, 10), shape)
-    assert_raises(ValueError, canonical_slicers, (10,), shape, True)
-    assert_raises(ValueError, canonical_slicers, (1, 10), shape, True)
+    with pytest.raises(ValueError):
+        canonical_slicers((10,), shape)
+    with pytest.raises(ValueError):
+        canonical_slicers((1, 10), shape)
+    with pytest.raises(ValueError):
+        canonical_slicers((10,), shape, True)
+    with pytest.raises(ValueError):
+        canonical_slicers((1, 10), shape, True)
     # Unless check_inds is False
-    assert_equal(canonical_slicers((10,), shape, False), (10, slice(None)))
-    assert_equal(canonical_slicers((1, 10,), shape, False), (1, 10))
+    assert canonical_slicers((10,), shape, False) == (10, slice(None))
+    assert canonical_slicers((1, 10,), shape, False) == (1, 10)
     # Check negative -> positive
-    assert_equal(canonical_slicers(-1, shape), (9, slice(None)))
-    assert_equal(canonical_slicers((slice(None), -1), shape), (slice(None), 9))
+    assert canonical_slicers(-1, shape) == (9, slice(None))
+    assert canonical_slicers((slice(None), -1), shape) == (slice(None), 9)
 
 
 def test_slice2outax():
     # Test function giving output axes from input ndims and slice
     sn = slice(None)
-    assert_equal(slice2outax(1, (sn,)), (0,))
-    assert_equal(slice2outax(1, (1,)), (None,))
-    assert_equal(slice2outax(1, (None,)), (1,))
-    assert_equal(slice2outax(1, (None, 1)), (None,))
-    assert_equal(slice2outax(1, (None, 1, None)), (None,))
-    assert_equal(slice2outax(1, (None, sn)), (1,))
-    assert_equal(slice2outax(2, (sn,)), (0, 1))
-    assert_equal(slice2outax(2, (sn, sn)), (0, 1))
-    assert_equal(slice2outax(2, (1,)), (None, 0))
-    assert_equal(slice2outax(2, (sn, 1)), (0, None))
-    assert_equal(slice2outax(2, (None,)), (1, 2))
-    assert_equal(slice2outax(2, (None, 1)), (None, 1))
-    assert_equal(slice2outax(2, (None, 1, None)), (None, 2))
-    assert_equal(slice2outax(2, (None, 1, None, 2)), (None, None))
-    assert_equal(slice2outax(2, (None, sn, None, 1)), (1, None))
-    assert_equal(slice2outax(3, (sn,)), (0, 1, 2))
-    assert_equal(slice2outax(3, (sn, sn)), (0, 1, 2))
-    assert_equal(slice2outax(3, (sn, None, sn)), (0, 2, 3))
-    assert_equal(slice2outax(3, (sn, None, sn, None, sn)), (0, 2, 4))
-    assert_equal(slice2outax(3, (1,)), (None, 0, 1))
-    assert_equal(slice2outax(3, (None, sn, None, 1)), (1, None, 3))
+    assert slice2outax(1, (sn,)) == (0,)
+    assert slice2outax(1, (1,)) == (None,)
+    assert slice2outax(1, (None,)) == (1,)
+    assert slice2outax(1, (None, 1)) == (None,)
+    assert slice2outax(1, (None, 1, None)) == (None,)
+    assert slice2outax(1, (None, sn)) == (1,)
+    assert slice2outax(2, (sn,)) == (0, 1)
+    assert slice2outax(2, (sn, sn)) == (0, 1)
+    assert slice2outax(2, (1,)) == (None, 0)
+    assert slice2outax(2, (sn, 1)) == (0, None)
+    assert slice2outax(2, (None,)) == (1, 2)
+    assert slice2outax(2, (None, 1)) == (None, 1)
+    assert slice2outax(2, (None, 1, None)) == (None, 2)
+    assert slice2outax(2, (None, 1, None, 2)) == (None, None)
+    assert slice2outax(2, (None, sn, None, 1)) == (1, None)
+    assert slice2outax(3, (sn,)) == (0, 1, 2)
+    assert slice2outax(3, (sn, sn)) == (0, 1, 2)
+    assert slice2outax(3, (sn, None, sn)) == (0, 2, 3)
+    assert slice2outax(3, (sn, None, sn, None, sn)) == (0, 2, 4)
+    assert slice2outax(3, (1,)) == (None, 0, 1)
+    assert slice2outax(3, (None, sn, None, 1)) == (1, None, 3)
 
 
 def _slices_for_len(L):
@@ -174,108 +175,108 @@ def _slices_for_len(L):
 
 def test_slice2len():
     # Test slice length calculation
-    assert_equal(slice2len(slice(None), 10), 10)
-    assert_equal(slice2len(slice(11), 10), 10)
-    assert_equal(slice2len(slice(1, 11), 10), 9)
-    assert_equal(slice2len(slice(1, 1), 10), 0)
-    assert_equal(slice2len(slice(1, 11, 2), 10), 5)
-    assert_equal(slice2len(slice(0, 11, 3), 10), 4)
-    assert_equal(slice2len(slice(1, 11, 3), 10), 3)
-    assert_equal(slice2len(slice(None, None, -1), 10), 10)
-    assert_equal(slice2len(slice(11, None, -1), 10), 10)
-    assert_equal(slice2len(slice(None, 1, -1), 10), 8)
-    assert_equal(slice2len(slice(None, None, -2), 10), 5)
-    assert_equal(slice2len(slice(None, None, -3), 10), 4)
-    assert_equal(slice2len(slice(None, 0, -3), 10), 3)
+    assert slice2len(slice(None), 10) == 10
+    assert slice2len(slice(11), 10) == 10
+    assert slice2len(slice(1, 11), 10) == 9
+    assert slice2len(slice(1, 1), 10) == 0
+    assert slice2len(slice(1, 11, 2), 10) == 5
+    assert slice2len(slice(0, 11, 3), 10) == 4
+    assert slice2len(slice(1, 11, 3), 10) == 3
+    assert slice2len(slice(None, None, -1), 10) == 10
+    assert slice2len(slice(11, None, -1), 10) == 10
+    assert slice2len(slice(None, 1, -1), 10) == 8
+    assert slice2len(slice(None, None, -2), 10) == 5
+    assert slice2len(slice(None, None, -3), 10) == 4
+    assert slice2len(slice(None, 0, -3), 10) == 3
     # Start, end are always taken to be relative if negative
-    assert_equal(slice2len(slice(None, -4, -1), 10), 3)
-    assert_equal(slice2len(slice(-4, -2, 1), 10), 2)
+    assert slice2len(slice(None, -4, -1), 10) == 3
+    assert slice2len(slice(-4, -2, 1), 10) == 2
     # start after stop
-    assert_equal(slice2len(slice(3, 2, 1), 10), 0)
-    assert_equal(slice2len(slice(2, 3, -1), 10), 0)
+    assert slice2len(slice(3, 2, 1), 10) == 0
+    assert slice2len(slice(2, 3, -1), 10) == 0
 
 
 def test_fill_slicer():
     # Test slice length calculation
-    assert_equal(fill_slicer(slice(None), 10), slice(0, 10, 1))
-    assert_equal(fill_slicer(slice(11), 10), slice(0, 10, 1))
-    assert_equal(fill_slicer(slice(1, 11), 10), slice(1, 10, 1))
-    assert_equal(fill_slicer(slice(1, 1), 10), slice(1, 1, 1))
-    assert_equal(fill_slicer(slice(1, 11, 2), 10), slice(1, 10, 2))
-    assert_equal(fill_slicer(slice(0, 11, 3), 10), slice(0, 10, 3))
-    assert_equal(fill_slicer(slice(1, 11, 3), 10), slice(1, 10, 3))
-    assert_equal(fill_slicer(slice(None, None, -1), 10),
+    assert fill_slicer(slice(None), 10) == slice(0, 10, 1)
+    assert fill_slicer(slice(11), 10) == slice(0, 10, 1)
+    assert fill_slicer(slice(1, 11), 10) == slice(1, 10, 1)
+    assert fill_slicer(slice(1, 1), 10) == slice(1, 1, 1)
+    assert fill_slicer(slice(1, 11, 2), 10) == slice(1, 10, 2)
+    assert fill_slicer(slice(0, 11, 3), 10) == slice(0, 10, 3)
+    assert fill_slicer(slice(1, 11, 3), 10) == slice(1, 10, 3)
+    assert (fill_slicer(slice(None, None, -1), 10) ==
           slice(9, None, -1))
-    assert_equal(fill_slicer(slice(11, None, -1), 10),
+    assert (fill_slicer(slice(11, None, -1), 10) ==
          slice(9, None, -1))
-    assert_equal(fill_slicer(slice(None, 1, -1), 10),
+    assert (fill_slicer(slice(None, 1, -1), 10) ==
         slice(9, 1, -1))
-    assert_equal(fill_slicer(slice(None, None, -2), 10),
+    assert (fill_slicer(slice(None, None, -2), 10) ==
        slice(9, None, -2))
-    assert_equal(fill_slicer(slice(None, None, -3), 10),
+    assert (fill_slicer(slice(None, None, -3), 10) ==
       slice(9, None, -3))
-    assert_equal(fill_slicer(slice(None, 0, -3), 10),
+    assert (fill_slicer(slice(None, 0, -3), 10) ==
      slice(9, 0, -3))
     # Start, end are always taken to be relative if negative
-    assert_equal(fill_slicer(slice(None, -4, -1), 10),
+    assert (fill_slicer(slice(None, -4, -1), 10) ==
    slice(9, 6, -1))
-    assert_equal(fill_slicer(slice(-4, -2, 1), 10),
+    assert (fill_slicer(slice(-4, -2, 1), 10) ==
   slice(6, 8, 1))
     # start after stop
-    assert_equal(fill_slicer(slice(3, 2, 1), 10),
+    assert (fill_slicer(slice(3, 2, 1), 10) ==
  slice(3, 2, 1))
-    assert_equal(fill_slicer(slice(2, 3, -1), 10),
+    assert (fill_slicer(slice(2, 3, -1), 10) ==
 slice(2, 3, -1))
 
 
 def test__positive_slice():
     # Reverse slice direction to be positive
-    assert_equal(_positive_slice(slice(0, 5, 1)), slice(0, 5, 1))
-    assert_equal(_positive_slice(slice(1, 5, 3)), slice(1, 5, 3))
-    assert_equal(_positive_slice(slice(4, None, -2)), slice(0, 5, 2))
-    assert_equal(_positive_slice(slice(4, None, -1)), slice(0, 5, 1))
-    assert_equal(_positive_slice(slice(4, 1, -1)), slice(2, 5, 1))
-    assert_equal(_positive_slice(slice(4, 1, -2)), slice(2, 5, 2))
+    assert _positive_slice(slice(0, 5, 1)) == slice(0, 5, 1)
+    assert _positive_slice(slice(1, 5, 3)) == slice(1, 5, 3)
+    assert _positive_slice(slice(4, None, -2)) == slice(0, 5, 2)
+    assert _positive_slice(slice(4, None, -1)) == slice(0, 5, 1)
+    assert _positive_slice(slice(4, 1, -1)) == slice(2, 5, 1)
+    assert _positive_slice(slice(4, 1, -2)) == slice(2, 5, 2)
 
 
 def test_threshold_heuristic():
     # Test for default skip / read heuristic
     # int
-    assert_equal(threshold_heuristic(1, 9, 1, skip_thresh=8), 'full')
-    assert_equal(threshold_heuristic(1, 9, 1, skip_thresh=7), None)
-    assert_equal(threshold_heuristic(1, 9, 2, skip_thresh=16), 'full')
-    assert_equal(threshold_heuristic(1, 9, 2, skip_thresh=15), None)
+    assert threshold_heuristic(1, 9, 1, skip_thresh=8) == 'full'
+    assert threshold_heuristic(1, 9, 1, skip_thresh=7) == None
+    assert threshold_heuristic(1, 9, 2, skip_thresh=16) == 'full'
+    assert threshold_heuristic(1, 9, 2, skip_thresh=15) == None
     # full slice, smallest step size
-    assert_equal(threshold_heuristic(
-        slice(0, 9, 1), 9, 2, skip_thresh=2),
+    assert (threshold_heuristic(
+        slice(0, 9, 1), 9, 2, skip_thresh=2) ==
        'full')
     # Dropping skip thresh below step size gives None
-    assert_equal(threshold_heuristic(
-        slice(0, 9, 1), 9, 2, skip_thresh=1),
+    assert (threshold_heuristic(
+        slice(0, 9, 1), 9, 2, skip_thresh=1) ==
        None)
     # As does increasing step size
-    assert_equal(threshold_heuristic(
-        slice(0, 9, 2), 9, 2, skip_thresh=3),
+    assert (threshold_heuristic(
+        slice(0, 9, 2), 9, 2, skip_thresh=3) ==
        None)
     # Negative step size same as positive
-    assert_equal(threshold_heuristic(
-        slice(9, None, -1), 9, 2, skip_thresh=2),
+    assert (threshold_heuristic(
+        slice(9, None, -1), 9, 2, skip_thresh=2) ==
        'full')
     # Add a gap between start and end.  Now contiguous because of step size
-    assert_equal(threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=2),
+    assert (threshold_heuristic(
+        slice(2, 9, 1), 9, 2, skip_thresh=2) ==
        'contiguous')
     # To not-contiguous, even with step size 1
-    assert_equal(threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=1),
+    assert (threshold_heuristic(
+        slice(2, 9, 1), 9, 2, skip_thresh=1) ==
        None)
     # Back to full when skip covers gap
-    assert_equal(threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=4),
+    assert (threshold_heuristic(
+        slice(2, 9, 1), 9, 2, skip_thresh=4) ==
        'full')
     # Until it doesn't cover the gap
-    assert_equal(threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=3),
+    assert (threshold_heuristic(
+        slice(2, 9, 1), 9, 2, skip_thresh=3) ==
        'contiguous')
@@ -307,226 +308,233 @@ def test_optimize_slicer():
             for is_slowest in (True, False):
                 # following tests not affected by all_full or optimization
                 # full - always passes through
-                assert_equal(
-                    optimize_slicer(slice(None), 10, all_full, is_slowest, 4, heuristic),
+                assert (
+                    optimize_slicer(slice(None), 10, all_full, is_slowest, 4, heuristic) ==
                    (slice(None), slice(None)))
                # Even if full specified with explicit values
-                assert_equal(
-                    optimize_slicer(slice(10), 10, all_full, is_slowest, 4, heuristic),
+                assert (
+                    optimize_slicer(slice(10), 10, all_full, is_slowest, 4, heuristic) ==
                    (slice(None), slice(None)))
-                assert_equal(
-                    optimize_slicer(slice(0, 10), 10, all_full, is_slowest, 4, heuristic),
+                assert (
+                    optimize_slicer(slice(0, 10), 10, all_full, is_slowest, 4, heuristic) ==
                    (slice(None), slice(None)))
-                assert_equal(
-                    optimize_slicer(slice(0, 10, 1), 10, all_full, is_slowest, 4, heuristic),
+                assert (
+                    optimize_slicer(slice(0, 10, 1), 10, all_full, is_slowest, 4, heuristic) ==
                    (slice(None), slice(None)))
                # Reversed full is still full, but with reversed post_slice
-                assert_equal(
+                assert (
                    optimize_slicer(
-                        slice(None, None, -1), 10, all_full, is_slowest, 4, heuristic),
+                        slice(None, None, -1), 10, all_full, is_slowest, 4, heuristic) ==
                    (slice(None), slice(None, None, -1)))
     # Contiguous is contiguous unless heuristic kicks in, in which case it may
     # be 'full'
-    assert_equal(
-        optimize_slicer(slice(9), 10, False, False, 4, _always),
+    assert (
+        optimize_slicer(slice(9), 10, False, False, 4, _always) ==
       (slice(0, 9, 1), slice(None)))
-    assert_equal(
-        optimize_slicer(slice(9), 10, True, False, 4, _always),
+    assert (
+        optimize_slicer(slice(9), 10, True, False, 4, _always) ==
       (slice(None), slice(0, 9, 1)))
    # Unless this is the slowest dimension, and all_true is True, in which case
    # we don't update to full
-    assert_equal(
-        optimize_slicer(slice(9), 10, True, True, 4, _always),
+    assert (
+        optimize_slicer(slice(9), 10, True, True, 4, _always) ==
       (slice(0, 9, 1), slice(None)))
     # Nor if the heuristic won't update
-    assert_equal(
-        optimize_slicer(slice(9), 10, True, False, 4, _never),
+    assert (
+        optimize_slicer(slice(9), 10, True, False, 4, _never) ==
       (slice(0, 9, 1), slice(None)))
-    assert_equal(
-        optimize_slicer(slice(1, 10), 10, True, False, 4, _never),
+    assert (
+        optimize_slicer(slice(1, 10), 10, True, False, 4, _never) ==
       (slice(1, 10, 1), slice(None)))
     # Reversed contiguous still contiguous
-    assert_equal(
-        optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) ==
       (slice(0, 9, 1), slice(None, None, -1)))
-    assert_equal(
-        optimize_slicer(slice(8, None, -1), 10, True, False, 4, _always),
+    assert (
+        optimize_slicer(slice(8, None, -1), 10, True, False, 4, _always) ==
       (slice(None), slice(8, None, -1)))
-    assert_equal(
-        optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) ==
       (slice(0, 9, 1), slice(None, None, -1)))
-    assert_equal(
-        optimize_slicer(slice(9, 0, -1), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(9, 0, -1), 10, False, False, 4, _never) ==
       (slice(1, 10, 1), slice(None, None, -1)))
     # Non-contiguous
-    assert_equal(
-        optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _never) ==
       (slice(0, 10, 2), slice(None)))
     # all_full triggers optimization, but optimization does nothing
-    assert_equal(
-        optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _never),
+    assert (
+        optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _never) ==
       (slice(0, 10, 2), slice(None)))
     # all_full triggers optimization, optimization does something
-    assert_equal(
-        optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always),
+    assert (
+        optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) ==
       (slice(None), slice(0, 10, 2)))
     # all_full disables optimization, optimization does something
-    assert_equal(
-        optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _always),
+    assert (
+        optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _always) ==
       (slice(0, 10, 2), slice(None)))
     # Non contiguous, reversed
-    assert_equal(
-        optimize_slicer(slice(10, None, -2), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(10, None, -2), 10, False, False, 4, _never) ==
       (slice(1, 10, 2), slice(None, None, -1)))
-    assert_equal(
-        optimize_slicer(slice(10, None, -2), 10, True, False, 4, _always),
+    assert (
+        optimize_slicer(slice(10, None, -2), 10, True, False, 4, _always) ==
       (slice(None), slice(9, None, -2)))
     # Short non-contiguous
-    assert_equal(
-        optimize_slicer(slice(2, 8, 2), 10, False, False, 4, _never),
+    assert (
+        optimize_slicer(slice(2, 8, 2), 10, False, False, 4, _never) ==
       (slice(2, 8, 2), slice(None)))
     # with partial read
-    assert_equal(
-        optimize_slicer(slice(2, 8, 2), 10, True, False, 4, _partial),
+    assert (
+        optimize_slicer(slice(2, 8, 2), 10, True, False, 4, _partial) ==
       (slice(2, 8, 1), slice(None, None, 2)))
     # If this is the slowest changing dimension, heuristic can upgrade None to
     # contiguous, but not (None, contiguous) to full
-    assert_equal(  # we've done this one already
-        optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always),
-        (slice(None), slice(0, 10, 2)))
-    assert_equal(  # if slowest, just upgrade to contiguous
-        optimize_slicer(slice(0, 10, 2), 10, True, True, 4, _always),
+    # we've done this one already
+    assert optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) \
+        == (slice(None), slice(0, 10, 2))
+    # if slowest, just upgrade to contiguous
+    assert (
+        optimize_slicer(slice(0, 10, 2), 10, True, True, 4, _always) ==
       (slice(0, 10, 1), slice(None, None, 2)))
-    assert_equal(  # contiguous does not upgrade to full
-        optimize_slicer(slice(9), 10, True, True, 4, _always),
+    # contiguous does not upgrade to full
+    assert (
+        optimize_slicer(slice(9), 10, True, True, 4, _always) ==
       (slice(0, 9, 1), slice(None)))
     # integer
-    assert_equal(
-        optimize_slicer(0, 10, True, False, 4, _never),
+    assert (
+        optimize_slicer(0, 10, True, False, 4, _never) ==
       (0, 'dropped'))
-    assert_equal(  # can be negative
-        optimize_slicer(-1, 10, True, False, 4, _never),
+    # can be negative
+    assert (
+        optimize_slicer(-1, 10, True, False, 4, _never) ==
       (9, 'dropped'))
-    assert_equal(  # or float
-        optimize_slicer(0.9, 10, True, False, 4, _never),
+    # or float
+    assert (
+        optimize_slicer(0.9, 10, True, False, 4, _never) ==
       (0, 'dropped'))
-    assert_raises(ValueError,  # should never get 'contiguous'
-                  optimize_slicer, 0, 10, True, False, 4, _partial)
-    assert_equal(  # full can be forced with heuristic
-        optimize_slicer(0, 10, True, False, 4, _always),
+    # should never get 'contiguous'
+    with pytest.raises(ValueError):
+        optimize_slicer(0, 10, True, False, 4, _partial)
+    # full can be forced with heuristic
+    assert (
+        optimize_slicer(0, 10, True, False, 4, _always) ==
       (slice(None), 0))
-    assert_equal(  # but disabled for slowest changing dimension
-        optimize_slicer(0, 10, True, True, 4, _always),
+    # but disabled for slowest changing dimension
+    assert (
+        optimize_slicer(0, 10, True, True, 4, _always) ==
       (0, 'dropped'))
 
 
 def test_optimize_read_slicers():
     # Test function to optimize read slicers
-    assert_equal(optimize_read_slicers((1,), (10,), 4, _never),
+    assert (optimize_read_slicers((1,), (10,), 4, _never) ==
         ((1,), ()))
-    assert_equal(optimize_read_slicers((slice(None),), (10,), 4, _never),
+    assert (optimize_read_slicers((slice(None),), (10,), 4, _never) ==
         ((slice(None),), (slice(None),)))
-    assert_equal(optimize_read_slicers((slice(9),), (10,), 4, _never),
+    assert (optimize_read_slicers((slice(9),), (10,), 4, _never) ==
         ((slice(0, 9, 1),), (slice(None),)))
     # optimize cannot update a continuous to a full if last
-    assert_equal(optimize_read_slicers((slice(9),), (10,), 4, _always),
+    assert (optimize_read_slicers((slice(9),), (10,), 4, _always) ==
         ((slice(0, 9, 1),), (slice(None),)))
     # optimize can update non-contiguous to continuous even if last
     # not optimizing
-    assert_equal(optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _never),
+    assert (optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _never) ==
        ((slice(0, 9, 2),), (slice(None),)))
     # optimizing
-    assert_equal(optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _always),
+    assert (optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _always) ==
       ((slice(0, 9, 1),), (slice(None, None, 2),)))
     # Optimize does nothing for integer when last
-    assert_equal(optimize_read_slicers((1,), (10,), 4, _always),
+    assert (optimize_read_slicers((1,), (10,), 4, _always) ==
      ((1,), ()))
     # 2D
-    assert_equal(optimize_read_slicers(
-        (slice(None), slice(None)), (10, 6), 4, _never),
+    assert (optimize_read_slicers(
+        (slice(None), slice(None)), (10, 6), 4, _never) ==
       ((slice(None), slice(None)), (slice(None), slice(None))))
-    assert_equal(optimize_read_slicers((slice(None), 1), (10, 6), 4, _never),
+    assert (optimize_read_slicers((slice(None), 1), (10, 6), 4, _never) ==
      ((slice(None), 1), (slice(None),)))
-    assert_equal(optimize_read_slicers((1, slice(None)), (10, 6), 4, _never),
+    assert (optimize_read_slicers((1, slice(None)), (10, 6), 4, _never) ==
     ((1, slice(None)), (slice(None),)))
     # Not optimizing a partial slice
-    assert_equal(optimize_read_slicers(
-        (slice(9), slice(None)), (10, 6), 4, _never),
+    assert (optimize_read_slicers(
+        (slice(9), slice(None)), (10, 6), 4, _never) ==
      ((slice(0, 9, 1), slice(None)), (slice(None), slice(None))))
     # Optimizing a partial slice
-    assert_equal(optimize_read_slicers(
-        (slice(9), slice(None)), (10, 6), 4, _always),
+    assert (optimize_read_slicers(
+        (slice(9), slice(None)), (10, 6), 4, _always) ==
     ((slice(None), slice(None)), (slice(0, 9, 1), slice(None))))
     # Optimize cannot update a continuous to a full if last
-    assert_equal(optimize_read_slicers(
-        (slice(None), slice(5)), (10, 6), 4, _always),
+    assert (optimize_read_slicers(
+        (slice(None), slice(5)), (10, 6), 4, _always) ==
    ((slice(None), slice(0, 5, 1)), (slice(None), slice(None))))
     # optimize can update non-contiguous to full if not last
     # not optimizing
-    assert_equal(optimize_read_slicers(
-        (slice(0, 9, 3), slice(None)), (10, 6), 4, _never),
+    assert (optimize_read_slicers(
+        (slice(0, 9, 3), slice(None)), (10, 6), 4, _never) ==
   ((slice(0, 9, 3), slice(None)), (slice(None), slice(None))))
     # optimizing full
-    assert_equal(optimize_read_slicers(
-        (slice(0, 9, 3), slice(None)), (10, 6), 4, _always),
+    assert (optimize_read_slicers(
+        (slice(0, 9, 3), slice(None)), (10, 6), 4, _always) ==
  ((slice(None), slice(None)), (slice(0, 9, 3), slice(None))))
     # optimizing partial
-    assert_equal(optimize_read_slicers(
-        (slice(0, 9, 3), slice(None)), (10, 6), 4, _partial),
+    assert (optimize_read_slicers(
+        (slice(0, 9, 3), slice(None)), (10, 6), 4, _partial) ==
 ((slice(0, 9, 1), slice(None)),
  (slice(None, None, 3), slice(None))))
     # optimize can update non-contiguous to continuous even if last
     # not optimizing
-    assert_equal(optimize_read_slicers(
-        (slice(None), slice(0, 5, 2)), (10, 6), 4, _never),
+    assert (optimize_read_slicers(
+        (slice(None), slice(0, 5, 2)), (10, 6), 4, _never) ==
 ((slice(None), slice(0, 5, 2)), (slice(None), slice(None))))
     # optimizing
-    assert_equal(optimize_read_slicers(
-        (slice(None), slice(0, 5, 2),), (10, 6), 4, _always),
+    assert (optimize_read_slicers(
+        (slice(None), slice(0, 5, 2),), (10, 6), 4, _always) ==
 ((slice(None), slice(0, 5, 1)),
  (slice(None), slice(None, None, 2))))
     # Optimize does nothing for integer when last
-    assert_equal(optimize_read_slicers(
-        (slice(None), 1), (10, 6), 4, _always),
+    assert (optimize_read_slicers(
+        (slice(None), 1), (10, 6), 4, _always) ==
 ((slice(None), 1), (slice(None),)))
     # Check gap threshold with 3D
     _depends0 = partial(threshold_heuristic, skip_thresh=10 * 4 - 1)
     _depends1 = partial(threshold_heuristic, skip_thresh=10 * 4)
-    assert_equal(optimize_read_slicers(
-        (slice(9), slice(None), slice(None)), (10, 6, 2), 4, _depends0),
+    assert (optimize_read_slicers(
+        (slice(9), slice(None), slice(None)), (10, 6, 2), 4, _depends0) ==
 ((slice(None), slice(None), slice(None)),
  (slice(0, 9, 1), slice(None), slice(None))))
-    assert_equal(optimize_read_slicers(
-        (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends0),
+    assert (optimize_read_slicers(
+        (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends0) ==
 ((slice(None), slice(0, 5, 1), slice(None)),
  (slice(None), slice(None), slice(None))))
-
assert_equal(optimize_read_slicers( - (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends1), + assert (optimize_read_slicers( + (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends1) == ((slice(None), slice(None), slice(None)), (slice(None), slice(0, 5, 1), slice(None)))) # Check longs as integer slices sn = slice(None) - assert_equal(optimize_read_slicers( - (1, 2, 3), (2, 3, 4), 4, _always), + assert (optimize_read_slicers( + (1, 2, 3), (2, 3, 4), 4, _always) == ((sn, sn, 3), (1, 2))) def test_slicers2segments(): # Test function to construct segments from slice objects - assert_equal(slicers2segments((0,), (10,), 7, 4), + assert (slicers2segments((0,), (10,), 7, 4) == [[7, 4]]) - assert_equal(slicers2segments((0, 1), (10, 6), 7, 4), + assert (slicers2segments((0, 1), (10, 6), 7, 4) == [[7 + 10 * 4, 4]]) - assert_equal(slicers2segments((0, 1, 2), (10, 6, 4), 7, 4), + assert (slicers2segments((0, 1, 2), (10, 6, 4), 7, 4) == [[7 + 10 * 4 + 10 * 6 * 2 * 4, 4]]) - assert_equal(slicers2segments((slice(None),), (10,), 7, 4), + assert (slicers2segments((slice(None),), (10,), 7, 4) == [[7, 10 * 4]]) - assert_equal(slicers2segments((0, slice(None)), (10, 6), 7, 4), + assert (slicers2segments((0, slice(None)), (10, 6), 7, 4) == [[7 + 10 * 4 * i, 4] for i in range(6)]) - assert_equal(slicers2segments((slice(None), 0), (10, 6), 7, 4), + assert (slicers2segments((slice(None), 0), (10, 6), 7, 4) == [[7, 10 * 4]]) - assert_equal(slicers2segments((slice(None), slice(None)), (10, 6), 7, 4), + assert (slicers2segments((slice(None), slice(None)), (10, 6), 7, 4) == [[7, 10 * 6 * 4]]) - assert_equal(slicers2segments( - (slice(None), slice(None), 2), (10, 6, 4), 7, 4), + assert (slicers2segments( + (slice(None), slice(None), 2), (10, 6, 4), 7, 4) == [[7 + 10 * 6 * 2 * 4, 10 * 6 * 4]]) @@ -535,71 +543,71 @@ def test_calc_slicedefs(): # wrote them after the code. 
We live and (fail to) learn segments, out_shape, new_slicing = calc_slicedefs( (1,), (10,), 4, 7, 'F', _never) - assert_equal(segments, [[11, 4]]) - assert_equal(new_slicing, ()) - assert_equal(out_shape, ()) - assert_equal( - calc_slicedefs((slice(None),), (10,), 4, 7, 'F', _never), + assert segments == [[11, 4]] + assert new_slicing == () + assert out_shape == () + assert ( + calc_slicedefs((slice(None),), (10,), 4, 7, 'F', _never) == ([[7, 40]], (10,), (), )) - assert_equal( - calc_slicedefs((slice(9),), (10,), 4, 7, 'F', _never), + assert ( + calc_slicedefs((slice(9),), (10,), 4, 7, 'F', _never) == ([[7, 36]], (9,), (), )) - assert_equal( - calc_slicedefs((slice(1, 9),), (10,), 4, 7, 'F', _never), + assert ( + calc_slicedefs((slice(1, 9),), (10,), 4, 7, 'F', _never) == ([[11, 32]], (8,), (), )) # Two dimensions, single slice - assert_equal( - calc_slicedefs((0,), (10, 6), 4, 7, 'F', _never), + assert ( + calc_slicedefs((0,), (10, 6), 4, 7, 'F', _never) == ([[7, 4], [47, 4], [87, 4], [127, 4], [167, 4], [207, 4]], (6,), (), )) - assert_equal( - calc_slicedefs((0,), (10, 6), 4, 7, 'C', _never), + assert ( + calc_slicedefs((0,), (10, 6), 4, 7, 'C', _never) == ([[7, 6 * 4]], (6,), (), )) # Two dimensions, contiguous not full - assert_equal( - calc_slicedefs((1, slice(1, 5)), (10, 6), 4, 7, 'F', _never), + assert ( + calc_slicedefs((1, slice(1, 5)), (10, 6), 4, 7, 'F', _never) == ([[51, 4], [91, 4], [131, 4], [171, 4]], (4,), (), )) - assert_equal( - calc_slicedefs((1, slice(1, 5)), (10, 6), 4, 7, 'C', _never), + assert ( + calc_slicedefs((1, slice(1, 5)), (10, 6), 4, 7, 'C', _never) == ([[7 + 7 * 4, 16]], (4,), (), )) # With full slice first - assert_equal( - calc_slicedefs((slice(None), slice(1, 5)), (10, 6), 4, 7, 'F', _never), + assert ( + calc_slicedefs((slice(None), slice(1, 5)), (10, 6), 4, 7, 'F', _never) == ([[47, 160]], (10, 4), (), )) # Check effect of heuristic on calc_slicedefs # Even integer slices can generate full when heuristic says so - assert_equal( - calc_slicedefs((1, slice(None)), (10, 6), 4, 7, 'F', _always), + assert ( + calc_slicedefs((1, slice(None)), (10, 6), 4, 7, 'F', _always) == ([[7, 10 * 6 * 4]], (10, 6), (1, slice(None)), )) # Except when last - assert_equal( - calc_slicedefs((slice(None), 1), (10, 6), 4, 7, 'F', _always), + assert ( + calc_slicedefs((slice(None), 1), (10, 6), 4, 7, 'F', _always) == ([[7 + 10 * 4, 10 * 4]], (10,), (), @@ -615,17 +623,17 @@ def test_predict_shape(): for i in range(n_dim): slicers_list.append(_slices_for_len(shape[i])) for sliceobj in product(*slicers_list): - assert_equal(predict_shape(sliceobj, shape), + assert (predict_shape(sliceobj, shape) == arr[sliceobj].shape) # Try some Nones and ellipses - assert_equal(predict_shape((Ellipsis,), (2, 3)), (2, 3)) - assert_equal(predict_shape((Ellipsis, 1), (2, 3)), (2,)) - assert_equal(predict_shape((1, Ellipsis), (2, 3)), (3,)) - assert_equal(predict_shape((1, slice(None), Ellipsis), (2, 3)), (3,)) - assert_equal(predict_shape((None,), (2, 3)), (1, 2, 3)) - assert_equal(predict_shape((None, 1), (2, 3)), (1, 3)) - assert_equal(predict_shape((1, None, slice(None)), (2, 3)), (1, 3)) - assert_equal(predict_shape((1, slice(None), None), (2, 3)), (3, 1)) + assert predict_shape((Ellipsis,), (2, 3)) == (2, 3) + assert predict_shape((Ellipsis, 1), (2, 3)) == (2,) + assert predict_shape((1, Ellipsis), (2, 3)) == (3,) + assert predict_shape((1, slice(None), Ellipsis), (2, 3)) == (3,) + assert predict_shape((None,), (2, 3)) == (1, 2, 3) + assert predict_shape((None, 1), (2, 3)) == (1, 3) + 
assert predict_shape((1, None, slice(None)), (2, 3)) == (1, 3) + assert predict_shape((1, slice(None), None), (2, 3)) == (3, 1) def test_strided_scalar(): @@ -636,18 +644,19 @@ def test_strided_scalar(): expected = np.zeros(shape, dtype=np.array(scalar).dtype) + scalar observed = strided_scalar(shape, scalar) assert_array_equal(observed, expected) - assert_equal(observed.shape, shape) - assert_equal(observed.dtype, expected.dtype) + assert observed.shape == shape + assert observed.dtype == expected.dtype assert_array_equal(observed.strides, 0) # Strided scalars are set as not writeable # This addresses a numpy 1.10 breakage of broadcasting a strided # array without resizing (see GitHub PR #358) - assert_false(observed.flags.writeable) + assert not observed.flags.writeable def setval(x): x[..., 0] = 99 # RuntimeError for numpy < 1.10 - assert_raises((RuntimeError, ValueError), setval, observed) + with pytest.raises((RuntimeError, ValueError)): + setval(observed) # Default scalar value is 0 assert_array_equal(strided_scalar((2, 3, 4)), np.zeros((2, 3, 4))) @@ -670,9 +679,12 @@ def test_read_segments(): np.r_[arr[5:25], arr[50:75]]) _check_bytes(read_segments(fobj, [], 0), arr[0:0]) # Error conditions - assert_raises(ValueError, read_segments, fobj, [], 1) - assert_raises(ValueError, read_segments, fobj, [(0, 200)], 199) - assert_raises(Exception, read_segments, fobj, [(0, 100), (100, 200)], 199) + with pytest.raises(ValueError): + read_segments(fobj, [], 1) + with pytest.raises(ValueError): + read_segments(fobj, [(0, 200)], 199) + with pytest.raises(Exception): + read_segments(fobj, [(0, 100), (100, 200)], 199) def test_read_segments_lock(): @@ -798,8 +810,8 @@ def test_fileslice_errors(): fobj = BytesIO(arr.tostring()) _check_slicer((1,), arr, fobj, 0, 'C') # Fancy indexing raises error - assert_raises(ValueError, - fileslice, fobj, (np.array(1),), (2, 3, 4), arr.dtype) + with pytest.raises(ValueError): + fileslice(fobj, (np.array(1),), (2, 3, 4), arr.dtype) def test_fileslice_heuristic(): diff --git a/nibabel/tests/test_fileutils.py b/nibabel/tests/test_fileutils.py index 63ecc8ee34..edc8384d4d 100644 --- a/nibabel/tests/test_fileutils.py +++ b/nibabel/tests/test_fileutils.py @@ -12,12 +12,7 @@ from ..fileutils import read_zt_byte_strings -from numpy.testing import (assert_almost_equal, - assert_array_equal) - -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) - +import pytest from ..tmpdirs import InTemporaryDirectory @@ -35,22 +30,24 @@ def test_read_zt_byte_strings(): # open it again fread = open(path, 'rb') # test readout of one string - assert_equal(read_zt_byte_strings(fread), [b'test.fmr']) + assert read_zt_byte_strings(fread) == [b'test.fmr'] # test new file position - assert_equal(fread.tell(), 9) + assert fread.tell() == 9 # manually rewind fread.seek(0) # test readout of two strings - assert_equal(read_zt_byte_strings(fread, 2), + assert (read_zt_byte_strings(fread, 2) == [b'test.fmr', b'test.prt']) - assert_equal(fread.tell(), 18) + assert fread.tell() == 18 # test readout of more strings than present fread.seek(0) - assert_raises(ValueError, read_zt_byte_strings, fread, 3) + with pytest.raises(ValueError): + read_zt_byte_strings(fread, 3) fread.seek(9) - assert_raises(ValueError, read_zt_byte_strings, fread, 2) + with pytest.raises(ValueError): + read_zt_byte_strings(fread, 2) # Try with a small bufsize fread.seek(0) - assert_equal(read_zt_byte_strings(fread, 2, 4), + assert (read_zt_byte_strings(fread, 2, 4) == [b'test.fmr', 
b'test.prt']) fread.close() diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index 0c4b5a8cb3..94d3a396b2 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -2,7 +2,6 @@ """ import sys -from distutils.version import LooseVersion import numpy as np @@ -10,10 +9,9 @@ int_to_float, floor_log2, type_info, _check_nmant, _check_maxexp, ok_floats, on_powerpc, have_binary128, longdouble_precision_improved) -from ..testing import suppress_warnings +from ..testing_pytest import suppress_warnings -from nose import SkipTest -from nose.tools import assert_equal, assert_raises, assert_true, assert_false +import pytest IEEE_floats = [np.float16, np.float32, np.float64] @@ -35,17 +33,17 @@ def test_type_info(): for dtt in np.sctypes['int'] + np.sctypes['uint']: info = np.iinfo(dtt) infod = type_info(dtt) - assert_equal(dict(min=info.min, max=info.max, + assert dict(min=info.min, max=info.max, nexp=None, nmant=None, minexp=None, maxexp=None, - width=np.dtype(dtt).itemsize), infod) - assert_equal(infod['min'].dtype.type, dtt) - assert_equal(infod['max'].dtype.type, dtt) + width=np.dtype(dtt).itemsize) == infod + assert infod['min'].dtype.type == dtt + assert infod['max'].dtype.type == dtt for dtt in IEEE_floats + [np.complex64, np.complex64]: infod = type_info(dtt) - assert_equal(dtt2dict(dtt), infod) - assert_equal(infod['min'].dtype.type, dtt) - assert_equal(infod['max'].dtype.type, dtt) + assert dtt2dict(dtt) == infod + assert infod['min'].dtype.type == dtt + assert infod['max'].dtype.type == dtt # What is longdouble? ld_dict = dtt2dict(np.longdouble) dbl_dict = dtt2dict(np.float64) @@ -70,14 +68,14 @@ def test_type_info(): ld_dict['width'] = width else: raise ValueError("Unexpected float type {} to test".format(np.longdouble)) - assert_equal(ld_dict, infod) + assert ld_dict == infod def test_nmant(): for t in IEEE_floats: - assert_equal(type_info(t)['nmant'], np.finfo(t).nmant) + assert type_info(t)['nmant'] == np.finfo(t).nmant if (LD_INFO['nmant'], LD_INFO['nexp']) == (63, 15): - assert_equal(type_info(np.longdouble)['nmant'], 63) + assert type_info(np.longdouble)['nmant'] == 63 def test_check_nmant_nexp(): @@ -85,35 +83,37 @@ def test_check_nmant_nexp(): for t in IEEE_floats: nmant = np.finfo(t).nmant maxexp = np.finfo(t).maxexp - assert_true(_check_nmant(t, nmant)) - assert_false(_check_nmant(t, nmant - 1)) - assert_false(_check_nmant(t, nmant + 1)) + assert _check_nmant(t, nmant) + assert not _check_nmant(t, nmant - 1) + assert not _check_nmant(t, nmant + 1) with suppress_warnings(): # overflow - assert_true(_check_maxexp(t, maxexp)) - assert_false(_check_maxexp(t, maxexp - 1)) + assert _check_maxexp(t, maxexp) + assert not _check_maxexp(t, maxexp - 1) with suppress_warnings(): - assert_false(_check_maxexp(t, maxexp + 1)) + assert not _check_maxexp(t, maxexp + 1) # Check against type_info for t in ok_floats(): ti = type_info(t) if ti['nmant'] not in (105, 106): # This check does not work for PPC double pair - assert_true(_check_nmant(t, ti['nmant'])) + assert _check_nmant(t, ti['nmant']) # Test fails for longdouble after blacklisting of OSX powl as of numpy # 1.12 - see https://github.com/numpy/numpy/issues/8307 if t != np.longdouble or sys.platform != 'darwin': - assert_true(_check_maxexp(t, ti['maxexp'])) + assert _check_maxexp(t, ti['maxexp']) def test_as_int(): # Integer representation of number - assert_equal(as_int(2.0), 2) - assert_equal(as_int(-2.0), -2) - assert_raises(FloatingError, as_int, 2.1) - assert_raises(FloatingError, 
as_int, -2.1) - assert_equal(as_int(2.1, False), 2) - assert_equal(as_int(-2.1, False), -2) + assert as_int(2.0) == 2 + assert as_int(-2.0) == -2 + with pytest.raises(FloatingError): + as_int(2.1) + with pytest.raises(FloatingError): + as_int(-2.1) + assert as_int(2.1, False) == 2 + assert as_int(-2.1, False) == -2 v = np.longdouble(2**64) - assert_equal(as_int(v), 2**64) + assert as_int(v) == 2**64 # Have all long doubles got 63+1 binary bits of precision? Windows 32-bit # longdouble appears to have 52 bit precision, but we avoid that by checking # for known precisions that are less than that required @@ -122,13 +122,15 @@ def test_as_int(): except FloatingError: nmant = 63 # Unknown precision, let's hope it's at least 63 v = np.longdouble(2) ** (nmant + 1) - 1 - assert_equal(as_int(v), 2**(nmant + 1) - 1) + assert as_int(v) == 2**(nmant + 1) - 1 # Check for predictable overflow nexp64 = floor_log2(type_info(np.float64)['max']) with np.errstate(over='ignore'): val = np.longdouble(2**nexp64) * 2 # outside float64 range - assert_raises(OverflowError, as_int, val) - assert_raises(OverflowError, as_int, -val) + with pytest.raises(OverflowError): + as_int(val) + with pytest.raises(OverflowError): + as_int(-val) def test_int_to_float(): @@ -138,18 +140,20 @@ def test_int_to_float(): nmant = type_info(ie3)['nmant'] for p in range(nmant + 3): i = 2**p + 1 - assert_equal(int_to_float(i, ie3), ie3(i)) - assert_equal(int_to_float(-i, ie3), ie3(-i)) + assert int_to_float(i, ie3) == ie3(i) + assert int_to_float(-i, ie3) == ie3(-i) # IEEEs in this case are binary formats only nexp = floor_log2(type_info(ie3)['max']) # Values too large for the format smn, smx = -2**(nexp + 1), 2**(nexp + 1) if ie3 is np.float64: - assert_raises(OverflowError, int_to_float, smn, ie3) - assert_raises(OverflowError, int_to_float, smx, ie3) + with pytest.raises(OverflowError): + int_to_float(smn, ie3) + with pytest.raises(OverflowError): + int_to_float(smx, ie3) else: - assert_equal(int_to_float(smn, ie3), ie3(smn)) - assert_equal(int_to_float(smx, ie3), ie3(smx)) + assert int_to_float(smn, ie3) == ie3(smn) + assert int_to_float(smx, ie3) == ie3(smx) # Longdoubles do better than int, we hope LD = np.longdouble # up to integer precision of float64 nmant, we get the same result as for @@ -157,29 +161,31 @@ def test_int_to_float(): nmant = type_info(np.float64)['nmant'] for p in range(nmant + 2): # implicit i = 2**p - 1 - assert_equal(int_to_float(i, LD), LD(i)) - assert_equal(int_to_float(-i, LD), LD(-i)) + assert int_to_float(i, LD) == LD(i) + assert int_to_float(-i, LD) == LD(-i) # Above max of float64, we're hosed nexp64 = floor_log2(type_info(np.float64)['max']) smn64, smx64 = -2**(nexp64 + 1), 2**(nexp64 + 1) # The algorithm here implemented goes through float64, so supermax and # supermin will cause overflow errors - assert_raises(OverflowError, int_to_float, smn64, LD) - assert_raises(OverflowError, int_to_float, smx64, LD) + with pytest.raises(OverflowError): + int_to_float(smn64, LD) + with pytest.raises(OverflowError): + int_to_float(smx64, LD) try: nmant = type_info(np.longdouble)['nmant'] except FloatingError: # don't know where to test return # test we recover precision just above nmant i = 2**(nmant + 1) - 1 - assert_equal(as_int(int_to_float(i, LD)), i) - assert_equal(as_int(int_to_float(-i, LD)), -i) + assert as_int(int_to_float(i, LD)) == i + assert as_int(int_to_float(-i, LD)) == -i # If longdouble can cope with 2**64, test if nmant >= 63: # Check conversion to int; the line below causes an error subtracting 
# ints / uint64 values, at least for Python 3.3 and numpy dev 1.8 big_int = np.uint64(2**64 - 1) - assert_equal(as_int(int_to_float(big_int, LD)), big_int) + assert as_int(int_to_float(big_int, LD)) == big_int def test_as_int_np_fix(): @@ -188,13 +194,13 @@ def test_as_int_np_fix(): for t in np.sctypes['int'] + np.sctypes['uint']: info = np.iinfo(t) mn, mx = np.array([info.min, info.max], dtype=t) - assert_equal((mn, mx), (as_int(mn), as_int(mx))) + assert (mn, mx) == (as_int(mn), as_int(mx)) def test_floor_exact_16(): # A normal integer can generate an inf in float16 - assert_equal(floor_exact(2**31, np.float16), np.inf) - assert_equal(floor_exact(-2**31, np.float16), -np.inf) + assert floor_exact(2**31, np.float16) == np.inf + assert floor_exact(-2**31, np.float16) == -np.inf def test_floor_exact_64(): @@ -203,11 +209,11 @@ def test_floor_exact_64(): start = np.float64(2**e) across = start + np.arange(2048, dtype=np.float64) gaps = set(np.diff(across)).difference([0]) - assert_equal(len(gaps), 1) + assert len(gaps) == 1 gap = gaps.pop() - assert_equal(gap, int(gap)) + assert gap == int(gap) test_val = 2**(e + 1) - 1 - assert_equal(floor_exact(test_val, np.float64), 2**(e + 1) - int(gap)) + assert floor_exact(test_val, np.float64) == 2**(e + 1) - int(gap) def test_floor_exact(): @@ -226,21 +232,21 @@ def test_floor_exact(): for t in to_test: # A number bigger than the range returns the max info = type_info(t) - assert_equal(floor_exact(2**5000, t), np.inf) - assert_equal(ceil_exact(2**5000, t), np.inf) + assert floor_exact(2**5000, t) == np.inf + assert ceil_exact(2**5000, t) == np.inf # A number more negative returns -inf - assert_equal(floor_exact(-2**5000, t), -np.inf) - assert_equal(ceil_exact(-2**5000, t), -np.inf) + assert floor_exact(-2**5000, t) == -np.inf + assert ceil_exact(-2**5000, t) == -np.inf # Check around end of integer precision nmant = info['nmant'] for i in range(nmant + 1): iv = 2**i # up to 2**nmant should be exactly representable for func in (int_flex, int_ceex): - assert_equal(func(iv, t), iv) - assert_equal(func(-iv, t), -iv) - assert_equal(func(iv - 1, t), iv - 1) - assert_equal(func(-iv + 1, t), -iv + 1) + assert func(iv, t) == iv + assert func(-iv, t) == -iv + assert func(iv - 1, t) == iv - 1 + assert func(-iv + 1, t) == -iv + 1 if t is np.longdouble and ( on_powerpc() or longdouble_precision_improved()): @@ -251,28 +257,28 @@ def test_floor_exact(): continue # Confirm to ourselves that 2**(nmant+1) can't be exactly represented iv = 2**(nmant + 1) - assert_equal(int_flex(iv + 1, t), iv) - assert_equal(int_ceex(iv + 1, t), iv + 2) + assert int_flex(iv + 1, t) == iv + assert int_ceex(iv + 1, t) == iv + 2 # negatives - assert_equal(int_flex(-iv - 1, t), -iv - 2) - assert_equal(int_ceex(-iv - 1, t), -iv) + assert int_flex(-iv - 1, t) == -iv - 2 + assert int_ceex(-iv - 1, t) == -iv # The gap in representable numbers is 2 above 2**(nmant+1), 4 above # 2**(nmant+2), and so on. 
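# Worked example of the gap sizes just described (a hedged sketch assuming
# IEEE binary64, where nmant == 52, not part of the original diff): integers
# up to 2**53 are exact, above 2**53 the spacing is 2, above 2**54 it is 4,
# and midpoints round to the even neighbour -- which is exactly the snapping
# behaviour floor_exact / ceil_exact must reproduce.
import numpy as np
assert np.finfo(np.float64).nmant == 52        # IEEE double
assert np.float64(2 ** 53 + 1) == 2 ** 53      # gap of 2: the +1 is absorbed
assert np.float64(2 ** 54 + 2) == 2 ** 54      # gap of 4: the +2 is absorbed
assert np.float64(2 ** 54 + 4) == 2 ** 54 + 4  # next representable integer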
for i in range(5): iv = 2**(nmant + 1 + i) gap = 2**(i + 1) - assert_equal(as_int(t(iv) + t(gap)), iv + gap) + assert as_int(t(iv) + t(gap)) == iv + gap for j in range(1, gap): - assert_equal(int_flex(iv + j, t), iv) - assert_equal(int_flex(iv + gap + j, t), iv + gap) - assert_equal(int_ceex(iv + j, t), iv + gap) - assert_equal(int_ceex(iv + gap + j, t), iv + 2 * gap) + assert int_flex(iv + j, t) == iv + assert int_flex(iv + gap + j, t) == iv + gap + assert int_ceex(iv + j, t) == iv + gap + assert int_ceex(iv + gap + j, t) == iv + 2 * gap # negatives for j in range(1, gap): - assert_equal(int_flex(-iv - j, t), -iv - gap) - assert_equal(int_flex(-iv - gap - j, t), -iv - 2 * gap) - assert_equal(int_ceex(-iv - j, t), -iv) - assert_equal(int_ceex(-iv - gap - j, t), -iv - gap) + assert int_flex(-iv - j, t) == -iv - gap + assert int_flex(-iv - gap - j, t) == -iv - 2 * gap + assert int_ceex(-iv - j, t) == -iv + assert int_ceex(-iv - gap - j, t) == -iv - gap def test_usable_binary128(): @@ -280,7 +286,7 @@ def test_usable_binary128(): yes = have_binary128() with np.errstate(over='ignore'): exp_test = np.longdouble(2) ** 16383 - assert_equal(yes, - exp_test.dtype.itemsize == 16 and + assert (yes == + (exp_test.dtype.itemsize == 16 and np.isfinite(exp_test) and - _check_nmant(np.longdouble, 112)) + _check_nmant(np.longdouble, 112))) diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 447555d6d0..94645f2839 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -18,7 +18,7 @@ from ..tmpdirs import InTemporaryDirectory from numpy.testing import assert_array_equal -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +import pytest _counter = 0 @@ -33,7 +33,8 @@ def _as_fname(img): def test_concat(): # Smoke test: concat empty list. - assert_raises(ValueError, concat_images, []) + with pytest.raises(ValueError): + concat_images([]) # Build combinations of 3D, 4D w/size[3] == 1, and 4D w/size[3] == 3 all_shapes_5D = ((1, 4, 5, 3, 3), @@ -104,25 +105,23 @@ def test_concat(): all_imgs = concat_images([img0, img1], **concat_imgs_kwargs) except ValueError as ve: - assert_true(expect_error, str(ve)) + assert expect_error, str(ve) else: - assert_false( - expect_error, "Expected a concatenation error, but got none.") + assert not expect_error, "Expected a concatenation error, but got none." assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) # check that not-matching affines raise error - assert_raises(ValueError, concat_images, [ - img0, img2], **concat_imgs_kwargs) + with pytest.raises(ValueError): + concat_images([img0, img2], **concat_imgs_kwargs) # except if check_affines is False try: all_imgs = concat_images([img0, img1], **concat_imgs_kwargs) except ValueError as ve: - assert_true(expect_error, str(ve)) + assert expect_error, str(ve) else: - assert_false( - expect_error, "Expected a concatenation error, but got none.") + assert not expect_error, "Expected a concatenation error, but got none." 
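# A minimal sketch of the nose-to-pytest translation pattern this series
# applies throughout (the _demo_* names are illustrative only, not from the
# diff): the function-style assert_raises(Error, func, *args) becomes a
# context manager, which also exposes the raised exception for extra checks.
import pytest

def _demo_raiser():
    raise ValueError('shape mismatch')

def _demo_translation():
    # nose style was: assert_raises(ValueError, _demo_raiser)
    with pytest.raises(ValueError) as excinfo:
        _demo_raiser()
    assert 'shape mismatch' in str(excinfo.value)

_demo_translation()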
assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) @@ -134,12 +133,12 @@ def test_closest_canonical(): # Test with an AnalyzeImage first img = AnalyzeImage(arr, np.eye(4)) xyz_img = as_closest_canonical(img) - assert_true(img is xyz_img) + assert img is xyz_img # And a case where the Analyze image has to be flipped img = AnalyzeImage(arr, np.diag([-1, 1, 1, 1])) xyz_img = as_closest_canonical(img) - assert_false(img is xyz_img) + assert not img is xyz_img out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) @@ -151,14 +150,14 @@ def test_closest_canonical(): # re-order them properly img.header.set_dim_info(0, 1, 2) xyz_img = as_closest_canonical(img) - assert_true(img is xyz_img) + assert img is xyz_img # a axis flip img = Nifti1Image(arr, np.diag([-1, 1, 1, 1])) img.header.set_dim_info(0, 1, 2) xyz_img = as_closest_canonical(img) - assert_false(img is xyz_img) - assert_true(img.header.get_dim_info() == xyz_img.header.get_dim_info()) + assert not img is xyz_img + assert img.header.get_dim_info() == xyz_img.header.get_dim_info() out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) @@ -170,9 +169,10 @@ def test_closest_canonical(): # although it's more or less canonical already img = Nifti1Image(arr, aff) xyz_img = as_closest_canonical(img) - assert_true(img is xyz_img) + assert img is xyz_img # it's still not diagnonal - assert_raises(OrientationError, as_closest_canonical, img, True) + with pytest.raises(OrientationError): + as_closest_canonical(img, True) # an axis swap aff = np.diag([1, 0, 0, 1]) @@ -181,14 +181,14 @@ def test_closest_canonical(): img.header.set_dim_info(0, 1, 2) xyz_img = as_closest_canonical(img) - assert_false(img is xyz_img) + assert not img is xyz_img # Check both the original and new objects - assert_true(img.header.get_dim_info() == (0, 1, 2)) - assert_true(xyz_img.header.get_dim_info() == (0, 2, 1)) + assert img.header.get_dim_info() == (0, 1, 2) + assert xyz_img.header.get_dim_info() == (0, 2, 1) out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.transpose(arr, (0, 2, 1, 3))) # same axis swap but with None dim info (except for slice dim) img.header.set_dim_info(None, None, 2) xyz_img = as_closest_canonical(img) - assert_true(xyz_img.header.get_dim_info() == (None, None, 1)) + assert xyz_img.header.get_dim_info() == (None, None, 1) diff --git a/nibabel/tests/test_h5py_compat.py b/nibabel/tests/test_h5py_compat.py index af5c50989c..325645a18c 100644 --- a/nibabel/tests/test_h5py_compat.py +++ b/nibabel/tests/test_h5py_compat.py @@ -10,7 +10,6 @@ from ..optpkg import optional_package from .. 
import _h5py_compat as compat -from ..testing import assert_equal, assert_true, assert_false, assert_not_equal h5py, have_h5py, _ = optional_package('h5py') @@ -18,30 +17,30 @@ def test_optpkg_equivalence(): # No effect on Linux/OSX if os.name == 'posix': - assert_equal(have_h5py, compat.have_h5py) + assert have_h5py == compat.have_h5py # No effect on Python 2.7 or 3.6+ if sys.version_info >= (3, 6) or sys.version_info < (3,): - assert_equal(have_h5py, compat.have_h5py) + assert have_h5py == compat.have_h5py # Available in a strict subset of cases if not have_h5py: - assert_false(compat.have_h5py) + assert not compat.have_h5py # Available when version is high enough elif LooseVersion(h5py.__version__) >= '2.10': - assert_true(compat.have_h5py) + assert compat.have_h5py def test_disabled_h5py_cases(): # On mismatch if have_h5py and not compat.have_h5py: # Recapitulate min_h5py conditions from _h5py_compat - assert_equal(os.name, 'nt') - assert_true((3,) <= sys.version_info < (3, 6)) - assert_true(LooseVersion(h5py.__version__) < '2.10') + assert os.name == 'nt' + assert (3,) <= sys.version_info < (3, 6) + assert LooseVersion(h5py.__version__) < '2.10' # Verify that the root cause is present # If any tests fail, they will likely be these, so they may be # ill-advised... if LooseVersion(np.__version__) < '1.18': - assert_equal(str(np.longdouble), str(np.float64)) + assert str(np.longdouble) == str(np.float64) else: - assert_not_equal(str(np.longdouble), str(np.float64)) - assert_not_equal(np.longdouble, np.float64) + assert str(np.longdouble) != str(np.float64) + assert np.longdouble != np.float64 diff --git a/nibabel/tests/test_helpers.py b/nibabel/tests/test_helpers.py index 928b4bd1a3..49112fddfb 100644 --- a/nibabel/tests/test_helpers.py +++ b/nibabel/tests/test_helpers.py @@ -4,12 +4,9 @@ import numpy as np -from ..openers import ImageOpener -from ..tmpdirs import InTemporaryDirectory from ..optpkg import optional_package _, have_scipy, _ = optional_package('scipy.io') -from nose.tools import assert_true from numpy.testing import assert_array_equal @@ -51,6 +48,6 @@ def assert_data_similar(arr, params): return summary = params['data_summary'] real_arr = np.asarray(arr) - assert_true(np.allclose( + assert np.allclose( (real_arr.min(), real_arr.max(), real_arr.mean()), - (summary['min'], summary['max'], summary['mean']))) + (summary['min'], summary['max'], summary['mean'])) From 50de669184375ce3cc551fda270204a296ed3abe Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 15 Nov 2019 15:23:20 -0500 Subject: [PATCH 450/689] fixing interaction between fixtures --- nibabel/tests/test_data.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index 0fc0bdc40d..e48356eb50 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -19,21 +19,19 @@ import pytest -from .test_environment import (setup_environment, - teardown_environment, +from .test_environment import (with_environment, DATA_KEY, USER_KEY) + @pytest.fixture() -def with_environment(request): - setup_environment() +def with_nimd_env(request, with_environment): DATA_FUNCS = {} DATA_FUNCS['home_dir_func'] = nibd.get_nipy_user_dir DATA_FUNCS['sys_dir_func'] = nibd.get_nipy_system_dir DATA_FUNCS['path_func'] = nibd.get_data_path def teardown_data_env(): - teardown_environment() nibd.get_nipy_user_dir = DATA_FUNCS['home_dir_func'] nibd.get_nipy_system_dir = DATA_FUNCS['sys_dir_func'] nibd.get_data_path = DATA_FUNCS['path_func'] @@ 
-116,7 +114,7 @@ def test__cfg_value(): pass -def test_data_path(with_environment): +def test_data_path(with_nimd_env): # wipe out any sources of data paths if DATA_KEY in env: del env[DATA_KEY] @@ -189,7 +187,7 @@ def test_find_data_dir(): assert dd == here -def test_make_datasource(with_environment): +def test_make_datasource(with_nimd_env): pkg_def = dict( relpath='pkg') with TemporaryDirectory() as tmpdir: @@ -219,7 +217,7 @@ def test_bomber_inspect(): assert not hasattr(b, 'any_attribute') -def test_datasource_or_bomber(with_environment): +def test_datasource_or_bomber(with_nimd_env): pkg_def = dict( relpath='pkg') with TemporaryDirectory() as tmpdir: From def8ceb7259434cc42bf07d2921e3b90a1579556 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 15 Nov 2019 15:24:41 -0500 Subject: [PATCH 451/689] moving test_helpers to testing/helpers.py (it contains no tests, only helper functions) --- nibabel/testing_pytest/__init__.py | 5 ++--- nibabel/{tests/test_helpers.py => testing_pytest/helpers.py} | 0 nibabel/tests/test_analyze.py | 2 +- nibabel/tests/test_brikhead.py | 3 +-- nibabel/tests/test_image_api.py | 4 ++-- nibabel/tests/test_minc1.py | 2 +- nibabel/tests/test_nifti1.py | 2 +- nibabel/tests/test_scripts.py | 2 +- nibabel/tests/test_spatialimages.py | 2 +- nibabel/tests/test_spm99analyze.py | 2 +- 10 files changed, 11 insertions(+), 13 deletions(-) rename nibabel/{tests/test_helpers.py => testing_pytest/helpers.py} (100%) diff --git a/nibabel/testing_pytest/__init__.py b/nibabel/testing_pytest/__init__.py index 675c506e3b..38143d9c38 100644 --- a/nibabel/testing_pytest/__init__.py +++ b/nibabel/testing_pytest/__init__.py @@ -22,7 +22,8 @@ slow = dec.slow from ..deprecated import deprecate_with_version as _deprecate_with_version - +from .np_features import memmap_after_ufunc +from .helpers import bytesio_filemap, bytesio_round_trip, assert_data_similar from itertools import zip_longest @@ -45,8 +46,6 @@ def test_data(subdir=None, fname=None): data_path = test_data() -from .np_features import memmap_after_ufunc - def assert_dt_equal(a, b): """ Assert two numpy dtype specifiers are equal diff --git a/nibabel/tests/test_helpers.py b/nibabel/testing_pytest/helpers.py similarity index 100% rename from nibabel/tests/test_helpers.py rename to nibabel/testing_pytest/helpers.py diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index b0cc566979..9032f136c5 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -38,7 +38,7 @@ from .test_wrapstruct import _TestLabeledWrapStruct from .
import test_spatialimages as tsi -from .test_helpers import bytesio_filemap, bytesio_round_trip +from ..testing_pytest import bytesio_filemap, bytesio_round_trip header_file = os.path.join(data_path, 'analyze.hdr') diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index 078193ce48..60bd008a46 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -16,10 +16,9 @@ import pytest from numpy.testing import assert_array_equal -from ..testing_pytest import data_path +from ..testing_pytest import data_path, assert_data_similar from .test_fileslice import slicer_samples -from .test_helpers import assert_data_similar EXAMPLE_IMAGES = [ dict( diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f91b61af9e..b9a066f598 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -46,12 +46,12 @@ from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose from ..testing import clear_and_catch_warnings +from ..testing_pytest import (bytesio_round_trip, bytesio_filemap, + assert_data_similar) from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError from .test_api_validators import ValidateAPI -from .test_helpers import (bytesio_round_trip, bytesio_filemap, - assert_data_similar) from .test_minc1 import EXAMPLE_IMAGES as MINC1_EXAMPLE_IMAGES from .test_minc2 import EXAMPLE_IMAGES as MINC2_EXAMPLE_IMAGES from .test_parrec import EXAMPLE_IMAGES as PARREC_EXAMPLE_IMAGES diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index eb9ff15cab..c7b044ed9b 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -27,10 +27,10 @@ from ..testing import (assert_true, assert_equal, assert_false, assert_raises, assert_warns, assert_array_equal, data_path, clear_and_catch_warnings) from ..deprecator import ExpiredDeprecationError +from ..testing_pytest import assert_data_similar from . 
import test_spatialimages as tsi from .test_fileslice import slicer_samples -from .test_helpers import assert_data_similar EG_FNAME = pjoin(data_path, 'tiny.mnc') diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 0213b615f1..dd98972df5 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -29,8 +29,8 @@ from .test_arraywriters import rt_err_estimate, IUINT_TYPES from .test_orientations import ALL_ORNTS -from .test_helpers import bytesio_filemap, bytesio_round_trip from .nibabel_data import get_nibabel_data, needs_nibabel_data +from ..testing_pytest import bytesio_filemap, bytesio_round_trip from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 6d46e57c5c..431e606380 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -31,7 +31,7 @@ from .test_parrec import (DTI_PAR_BVECS, DTI_PAR_BVALS, EXAMPLE_IMAGES as PARREC_EXAMPLES) from .test_parrec_data import BALLS, AFF_OFF -from .test_helpers import assert_data_similar +from ..testing_pytest import assert_data_similar def _proc_stdout(stdout): diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 8b11e5cc51..3f9c8b4b97 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -24,9 +24,9 @@ assert_not_equal, assert_raises) from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns -from .test_helpers import bytesio_round_trip from ..testing import (clear_and_catch_warnings, suppress_warnings, memmap_after_ufunc) +from ..testing_pytest import bytesio_round_trip from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError from .. import load as top_load diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 86143f35ab..71fe41e2ec 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -29,9 +29,9 @@ from nose.tools import assert_true, assert_false, assert_equal, assert_raises from ..testing import assert_allclose_safely, suppress_warnings +from ..testing_pytest import bytesio_round_trip, bytesio_filemap from . import test_analyze -from .test_helpers import bytesio_round_trip, bytesio_filemap FLOAT_TYPES = np.sctypes['float'] COMPLEX_TYPES = np.sctypes['complex'] From 629dc57549e598491151caea76f6f026e56ad713 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 15 Nov 2019 15:26:34 -0500 Subject: [PATCH 452/689] adding new tests to travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 63176ce581..d3d49e1853 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,7 +130,7 @@ script: cd for_testing cp ../.coveragerc . 
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py + pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py ../nibabel/tests/test_e*.py ../nibabel/tests/test_f*.py ../nibabel/tests/test_h*.py pytest -v ../nibabel/tests/test_w*.py else false From 51cc6aa73297f8573ccf179e97ab173501752481 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 15 Nov 2019 16:56:01 -0500 Subject: [PATCH 453/689] adding more tests --- .travis.yml | 4 +- nibabel/tests/test_image_api.py | 155 ++++++++++++++------------ nibabel/tests/test_image_load_save.py | 120 ++++++++++---------- nibabel/tests/test_image_types.py | 5 +- nibabel/tests/test_imageclasses.py | 21 ++-- nibabel/tests/test_imageglobals.py | 8 +- nibabel/tests/test_keywordonly.py | 42 ++++--- nibabel/tests/test_loadsave.py | 22 ++-- nibabel/tests/test_minc1.py | 39 +++---- nibabel/tests/test_minc2.py | 4 +- nibabel/tests/test_minc2_data.py | 7 +- nibabel/tests/test_mriutils.py | 14 +-- 12 files changed, 225 insertions(+), 216 deletions(-) diff --git a/.travis.yml b/.travis.yml index d3d49e1853..43ddf9dde1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,7 +130,9 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py + pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py + pytest -v ../nibabel/tests/test_e*.py ../nibabel/tests/test_f*.py ../nibabel/tests/test_h*.py + pytest -v ../nibabel/tests/test_i*.py ../nibabel/tests/test_k*.py ../nibabel/tests/test_l*.py ../nibabel/tests/test_m*.py pytest -v ../nibabel/tests/test_w*.py else false diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f91b61af9e..ee8c287df2 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -41,13 +41,11 @@ from ..spatialimages import SpatialImage from ..
import minc1, minc2, parrec, brikhead -from nose import SkipTest -from nose.tools import (assert_true, assert_false, assert_raises, assert_equal) +import pytest from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose -from ..testing import clear_and_catch_warnings from ..testing_pytest import (bytesio_round_trip, bytesio_filemap, - assert_data_similar) + assert_data_similar, clear_and_catch_warnings) from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError @@ -109,7 +107,8 @@ def validate_header(self, imaker, params): img = imaker() hdr = img.header # we can fetch it # Read only - assert_raises(AttributeError, setattr, img, 'header', hdr) + with pytest.raises(AttributeError): + setattr(img, 'header', hdr) def validate_header_deprecated(self, imaker, params): # Check deprecated header API @@ -117,13 +116,14 @@ def validate_header_deprecated(self, imaker, params): with clear_and_catch_warnings() as w: warnings.simplefilter('always', DeprecationWarning) hdr = img.get_header() - assert_equal(len(w), 1) - assert_true(hdr is img.header) + assert len(w) == 1 + assert hdr is img.header def validate_filenames(self, imaker, params): # Validate the filename, file_map interface + if not self.can_save: - raise SkipTest + pytest.skip() img = imaker() img.set_data_dtype(np.float32) # to avoid rounding in load / save # Make sure the object does not have a file_map @@ -145,8 +145,8 @@ def validate_filenames(self, imaker, params): fname = 'an_image' + self.standard_extension for path in (fname, pathlib.Path(fname)): img.set_filename(path) - assert_equal(img.get_filename(), str(path)) - assert_equal(img.file_map['image'].filename, str(path)) + assert img.get_filename() == str(path) + assert img.file_map['image'].filename == str(path) # to_ / from_ filename fname = 'another_image' + self.standard_extension for path in (fname, pathlib.Path(fname)): @@ -163,8 +163,10 @@ def validate_filenames(self, imaker, params): def validate_no_slicing(self, imaker, params): img = imaker() - assert_raises(TypeError, img.__getitem__, 'string') - assert_raises(TypeError, img.__getitem__, slice(None)) + with pytest.raises(TypeError): + img.__getitem__('string') + with pytest.raises(TypeError): + img.__getitem__(slice(None)) def validate_get_data_deprecated(self, imaker, params): # Check deprecated header API @@ -184,19 +186,19 @@ def validate_dtype(self, imaker, params): # data / storage dtype img = imaker() # Need to rename this one - assert_equal(img.get_data_dtype().type, params['dtype']) + assert img.get_data_dtype().type == params['dtype'] # dtype survives round trip if self.has_scaling and self.can_save: with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) - assert_equal(rt_img.get_data_dtype().type, params['dtype']) + assert rt_img.get_data_dtype().type == params['dtype'] # Setting to a different dtype img.set_data_dtype(np.float32) # assumed supported for all formats - assert_equal(img.get_data_dtype().type, np.float32) + assert img.get_data_dtype().type == np.float32 # dtype survives round trip if self.can_save: rt_img = bytesio_round_trip(img) - assert_equal(rt_img.get_data_dtype().type, np.float32) + assert rt_img.get_data_dtype().type == np.float32 class DataInterfaceMixin(GetSetDtypeMixin): @@ -210,8 +212,8 @@ class DataInterfaceMixin(GetSetDtypeMixin): def validate_data_interface(self, imaker, params): # Check get data returns array, and caches img = imaker() - assert_equal(img.shape, img.dataobj.shape) - 
assert_equal(img.ndim, len(img.shape)) + assert img.shape == img.dataobj.shape + assert img.ndim == len(img.shape) assert_data_similar(img.dataobj, params) for meth_name in self.meth_names: if params['is_proxy']: @@ -219,53 +221,56 @@ def validate_data_interface(self, imaker, params): else: # Array image self._check_array_interface(imaker, meth_name) # Data shape is same as image shape - assert_equal(img.shape, getattr(img, meth_name)().shape) + assert img.shape == getattr(img, meth_name)().shape # Data ndim is same as image ndim - assert_equal(img.ndim, getattr(img, meth_name)().ndim) + assert img.ndim == getattr(img, meth_name)().ndim # Values to get_data caching parameter must be 'fill' or # 'unchanged' - assert_raises(ValueError, img.get_data, caching='something') + with pytest.raises(ValueError): + img.get_data(caching='something') # dataobj is read only fake_data = np.zeros(img.shape).astype(img.get_data_dtype()) - assert_raises(AttributeError, setattr, img, 'dataobj', fake_data) + with pytest.raises(AttributeError): + setattr(img, 'dataobj', fake_data) # So is in_memory - assert_raises(AttributeError, setattr, img, 'in_memory', False) + with pytest.raises(AttributeError): + setattr(img, 'in_memory', False) def _check_proxy_interface(self, imaker, meth_name): # Parameters assert this is an array proxy img = imaker() # Does is_proxy agree? - assert_true(is_proxy(img.dataobj)) + assert is_proxy(img.dataobj) # Confirm it is not a numpy array - assert_false(isinstance(img.dataobj, np.ndarray)) + assert not isinstance(img.dataobj, np.ndarray) # Confirm it can be converted to a numpy array with asarray proxy_data = np.asarray(img.dataobj) proxy_copy = proxy_data.copy() # Not yet cached, proxy image: in_memory is False - assert_false(img.in_memory) + assert not img.in_memory # Load with caching='unchanged' method = getattr(img, meth_name) data = method(caching='unchanged') # Still not cached - assert_false(img.in_memory) + assert not img.in_memory # Default load, does caching data = method() # Data now cached. in_memory is True if either of the get_data # or get_fdata caches are not-None - assert_true(img.in_memory) + assert img.in_memory # We previously got proxy_data from disk, but data, which we # have just fetched, is a fresh copy. - assert_false(proxy_data is data) + assert not proxy_data is data # asarray on dataobj, applied above, returns same numerical # values. This might not be true get_fdata operating on huge # integers, but lets assume that's not true here. 
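# A minimal, self-contained sketch of the proxy caching contract checked
# here (the demo image below is illustrative, not part of the diff): a round
# trip through BytesIO yields a proxy image whose cache is controlled by the
# ``caching`` keyword and cleared by ``uncache()``.
from io import BytesIO
import numpy as np
import nibabel as nib

demo_img = nib.Nifti1Image(np.arange(24, dtype=np.float32).reshape(2, 3, 4), np.eye(4))
fmap = nib.Nifti1Image.make_file_map()
fmap['image'].fileobj = BytesIO()
demo_img.to_file_map(fmap)
proxy_img = nib.Nifti1Image.from_file_map(fmap)
assert not proxy_img.in_memory                  # proxy, nothing cached yet
_ = proxy_img.get_fdata(caching='unchanged')    # read but leave cache empty
assert not proxy_img.in_memory
_ = proxy_img.get_fdata()                       # default 'fill' caches the array
assert proxy_img.in_memory
proxy_img.uncache()                             # drop the cached copy again
assert not proxy_img.in_memory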
assert_array_equal(proxy_data, data) # Now caching='unchanged' does nothing, returns cached version data_again = method(caching='unchanged') - assert_true(data is data_again) + assert data is data_again # caching='fill' does nothing because the cache is already full data_yet_again = method(caching='fill') - assert_true(data is data_yet_again) + assert data is data_yet_again # changing array data does not change proxy data, or reloaded # data data[:] = 42 @@ -276,16 +281,16 @@ def _check_proxy_interface(self, imaker, meth_name): # until we uncache img.uncache() # Which unsets in_memory - assert_false(img.in_memory) + assert not img.in_memory assert_array_equal(method(), proxy_copy) # Check caching='fill' does cache data img = imaker() method = getattr(img, meth_name) - assert_false(img.in_memory) + assert not img.in_memory data = method(caching='fill') - assert_true(img.in_memory) + assert img.in_memory data_again = method() - assert_true(data is data_again) + assert data is data_again # Check the interaction of caching with get_data, get_fdata. # Caching for `get_data` should have no effect on caching for # get_fdata, and vice versa. @@ -297,21 +302,21 @@ def _check_proxy_interface(self, imaker, meth_name): other_data = other_method() # We get the original data, not the modified cache assert_array_equal(proxy_data, other_data) - assert_false(np.all(data == other_data)) + assert not np.all(data == other_data) # We can modify the other cache, without affecting the first other_data[:] = 44 assert_array_equal(other_method(), 44) - assert_false(np.all(method() == other_method())) + assert not np.all(method() == other_method()) if meth_name != 'get_fdata': return # Check that caching refreshes for new floating point type. img.uncache() fdata = img.get_fdata() - assert_equal(fdata.dtype, np.float64) + assert fdata.dtype == np.float64 fdata[:] = 42 fdata_back = img.get_fdata() assert_array_equal(fdata_back, 42) - assert_equal(fdata_back.dtype, np.float64) + assert fdata_back.dtype == np.float64 # New data dtype, no caching, doesn't use or alter cache fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4') # We get back the original read, not the modified cache @@ -319,7 +324,7 @@ def _check_proxy_interface(self, imaker, meth_name): # factors, rather than 64-bit factors and then cast to float-32 # Use rtol/atol from numpy.allclose assert_allclose(fdata_new_dt, proxy_data.astype('f4'), rtol=1e-05, atol=1e-08) - assert_equal(fdata_new_dt.dtype, np.float32) + assert fdata_new_dt.dtype == np.float32 # The original cache stays in place, for default float64 assert_array_equal(img.get_fdata(), 42) # And for not-default float32, because we haven't cached @@ -345,8 +350,8 @@ def _check_array_caching(self, imaker, meth_name, caching): method = getattr(img, meth_name) get_data_func = (method if caching is None else partial(method, caching=caching)) - assert_true(isinstance(img.dataobj, np.ndarray)) - assert_true(img.in_memory) + assert isinstance(img.dataobj, np.ndarray) + assert img.in_memory data = get_data_func() # Returned data same object as underlying dataobj if using # old ``get_data`` method, or using newer ``get_fdata`` @@ -356,10 +361,10 @@ def _check_array_caching(self, imaker, meth_name, caching): # Set something to the output array. 
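# Hedged sketch of the array-backed case exercised by this helper: when the
# image wraps a float64 array directly, get_fdata() can hand back the very
# array stored in ``dataobj``, so writes through the result are visible in
# the image (the ``arr64`` / ``arr_img`` names are illustrative only).
import numpy as np
import nibabel as nib

arr64 = np.zeros((2, 3, 4), dtype=np.float64)
arr_img = nib.Nifti1Image(arr64, np.eye(4))
assert arr_img.in_memory                 # array images are always in memory
fdata = arr_img.get_fdata()              # float64 in, float64 out: no copy
assert fdata is arr_img.dataobj
fdata[0, 0, 0] = 42                      # so writes show through
assert np.asarray(arr_img.dataobj)[0, 0, 0] == 42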
data[:] = 42 get_result_changed = np.all(get_data_func() == 42) - assert_equal(get_result_changed, - dataobj_is_data or caching != 'unchanged') + assert (get_result_changed == + (dataobj_is_data or caching != 'unchanged')) if dataobj_is_data: - assert_true(data is img.dataobj) + assert data is img.dataobj # Changing array data changes # data assert_array_equal(np.asarray(img.dataobj), 42) @@ -367,15 +372,15 @@ def _check_array_caching(self, imaker, meth_name, caching): img.uncache() assert_array_equal(get_data_func(), 42) else: - assert_false(data is img.dataobj) - assert_false(np.all(np.asarray(img.dataobj) == 42)) + assert not data is img.dataobj + assert not np.all(np.asarray(img.dataobj) == 42) # Uncache does have an effect img.uncache() - assert_false(np.all(get_data_func() == 42)) + assert not np.all(get_data_func() == 42) # in_memory is always true for array images, regardless of # cache state. img.uncache() - assert_true(img.in_memory) + assert img.in_memory if meth_name != 'get_fdata': return # Return original array from get_fdata only if the input array is the @@ -385,7 +390,7 @@ def _check_array_caching(self, imaker, meth_name, caching): return for float_type in float_types: data = get_data_func(dtype=float_type) - assert_equal(data is img.dataobj, arr_dtype == float_type) + assert (data is img.dataobj) == (arr_dtype == float_type) def validate_data_deprecated(self, imaker, params): # Check _data property still exists, but raises warning @@ -393,37 +398,40 @@ def validate_data_deprecated(self, imaker, params): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") assert_data_similar(img._data, params) - assert_equal(warns.pop(0).category, DeprecationWarning) + assert warns.pop(0).category == DeprecationWarning # Check setting _data raises error fake_data = np.zeros(img.shape).astype(img.get_data_dtype()) - assert_raises(AttributeError, setattr, img, '_data', fake_data) + with pytest.raises(AttributeError): + setattr(img, '_data', fake_data) def validate_shape(self, imaker, params): # Validate shape img = imaker() # Same as expected shape - assert_equal(img.shape, params['shape']) + assert img.shape == params['shape'] # Same as array shape if passed if 'data' in params: - assert_equal(img.shape, params['data'].shape) + assert img.shape == params['data'].shape # Read only - assert_raises(AttributeError, setattr, img, 'shape', np.eye(4)) + with pytest.raises(AttributeError): + setattr(img, 'shape', np.eye(4)) def validate_ndim(self, imaker, params): # Validate shape img = imaker() # Same as expected ndim - assert_equal(img.ndim, len(params['shape'])) + assert img.ndim == len(params['shape']) # Same as array ndim if passed if 'data' in params: - assert_equal(img.ndim, params['data'].ndim) + assert img.ndim == params['data'].ndim # Read only - assert_raises(AttributeError, setattr, img, 'ndim', 5) + with pytest.raises(AttributeError): + setattr(img, 'ndim', 5) def validate_shape_deprecated(self, imaker, params): # Check deprecated get_shape API img = imaker() - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): img.get_shape() def validate_mmap_parameter(self, imaker, params): @@ -448,10 +456,10 @@ def validate_mmap_parameter(self, imaker, params): rt_img = img.__class__.from_filename(fname, mmap='r') assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) # r+ is specifically not valid for images - assert_raises(ValueError, - img.__class__.from_filename, fname, mmap='r+') - assert_raises(ValueError, - 
img.__class__.from_filename, fname, mmap='invalid') + with pytest.raises(ValueError): + img.__class__.from_filename(fname, mmap='r+') + with pytest.raises(ValueError): + img.__class__.from_filename(fname, mmap='invalid') del rt_img # to allow windows to delete the directory @@ -469,8 +477,8 @@ def validate_header_shape(self, imaker, params): shape = hdr.get_data_shape() new_shape = (shape[0] + 1,) + shape[1:] hdr.set_data_shape(new_shape) - assert_true(img.header is hdr) - assert_equal(img.header.get_data_shape(), new_shape) + assert img.header is hdr + assert img.header.get_data_shape() == new_shape class AffineMixin(object): @@ -484,11 +492,12 @@ def validate_affine(self, imaker, params): # Check affine API img = imaker() assert_almost_equal(img.affine, params['affine'], 6) - assert_equal(img.affine.dtype, np.float64) + assert img.affine.dtype == np.float64 img.affine[0, 0] = 1.5 - assert_equal(img.affine[0, 0], 1.5) + assert img.affine[0, 0] == 1.5 # Read only - assert_raises(AttributeError, setattr, img, 'affine', np.eye(4)) + with pytest.raises(AttributeError): + setattr(img, 'affine', np.eye(4)) def validate_affine_deprecated(self, imaker, params): # Check deprecated affine API @@ -496,11 +505,11 @@ def validate_affine_deprecated(self, imaker, params): with clear_and_catch_warnings() as w: warnings.simplefilter('always', DeprecationWarning) assert_almost_equal(img.get_affine(), params['affine'], 6) - assert_equal(len(w), 1) - assert_equal(img.get_affine().dtype, np.float64) + assert len(w) == 1 + assert img.get_affine().dtype == np.float64 aff = img.get_affine() aff[0, 0] = 1.5 - assert_true(aff is img.get_affine()) + assert aff is img.get_affine() class SerializeMixin(object): @@ -584,7 +593,7 @@ def obj_params(self): def validate_path_maybe_image(self, imaker, params): for img_params in self.example_images: test, sniff = self.klass.path_maybe_image(img_params['fname']) - assert_true(isinstance(test, bool)) + assert isinstance(test, bool) if sniff is not None: assert isinstance(sniff[0], bytes) assert isinstance(sniff[1], str) @@ -707,7 +716,7 @@ class TestMinc2API(TestMinc1API): def __init__(self): if not have_h5py: - raise SkipTest('Need h5py for these tests') + pytest.skip('Need h5py for these tests') klass = image_maker = Minc2Image loader = minc2.load diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 9d58a3ed60..8dd64f8185 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -30,7 +30,7 @@ from ..spatialimages import SpatialImage from numpy.testing import assert_array_equal, assert_array_almost_equal -from nose.tools import assert_true, assert_equal, assert_not_equal, assert_raises +import pytest _, have_scipy, _ = optional_package('scipy') # No scipy=>no SPM-format writing DATA_PATH = pjoin(dirname(__file__), 'data') @@ -68,15 +68,15 @@ def test_save_load_endian(): data = np.arange(np.prod(shape), dtype='f4').reshape(shape) # Native endian image img = Nifti1Image(data, affine) - assert_equal(img.header.endianness, native_code) + assert img.header.endianness == native_code img2 = round_trip(img) - assert_equal(img2.header.endianness, native_code) + assert img2.header.endianness == native_code assert_array_equal(img2.get_fdata(), data) assert_array_equal(np.asanyarray(img2.dataobj), data) # byte swapped endian image bs_hdr = img.header.as_byteswapped() bs_img = Nifti1Image(data, affine, bs_hdr) - assert_equal(bs_img.header.endianness, swapped_code) + assert bs_img.header.endianness == 
swapped_code
     # of course the data is the same because it's not written to disk
     assert_array_equal(bs_img.get_fdata(), data)
     assert_array_equal(np.asanyarray(bs_img.dataobj), data)
@@ -84,27 +84,27 @@ def test_save_load_endian():
     cbs_img = AnalyzeImage.from_image(bs_img)
     # this will make the header native by doing the header conversion
     cbs_hdr = cbs_img.header
-    assert_equal(cbs_hdr.endianness, native_code)
+    assert cbs_hdr.endianness == native_code
     # and the byte order follows it back into another image
     cbs_img2 = Nifti1Image.from_image(cbs_img)
     cbs_hdr2 = cbs_img2.header
-    assert_equal(cbs_hdr2.endianness, native_code)
+    assert cbs_hdr2.endianness == native_code
     # Try byteswapped round trip
     bs_img2 = round_trip(bs_img)
     bs_data2 = np.asanyarray(bs_img2.dataobj)
     bs_fdata2 = bs_img2.get_fdata()
     # now the data dtype was swapped endian, so the read data is too
-    assert_equal(bs_data2.dtype.byteorder, swapped_code)
-    assert_equal(bs_img2.header.endianness, swapped_code)
+    assert bs_data2.dtype.byteorder == swapped_code
+    assert bs_img2.header.endianness == swapped_code
     assert_array_equal(bs_data2, data)
     # but get_fdata uses native endian
-    assert_not_equal(bs_fdata2.dtype.byteorder, swapped_code)
+    assert bs_fdata2.dtype.byteorder != swapped_code
     assert_array_equal(bs_fdata2, data)
     # Now mix up byteswapped data and non-byteswapped header
     mixed_img = Nifti1Image(bs_data2, affine)
-    assert_equal(mixed_img.header.endianness, native_code)
+    assert mixed_img.header.endianness == native_code
     m_img2 = round_trip(mixed_img)
-    assert_equal(m_img2.header.endianness, native_code)
+    assert m_img2.header.endianness == native_code
     assert_array_equal(m_img2.get_fdata(), data)
@@ -121,7 +121,7 @@ def test_save_load():
         sifn = 'another_image.img'
         ni1.save(img, nifn)
         re_img = nils.load(nifn)
-        assert_true(isinstance(re_img, ni1.Nifti1Image))
+        assert isinstance(re_img, ni1.Nifti1Image)
         assert_array_equal(re_img.get_fdata(), data)
         assert_array_equal(re_img.affine, affine)
         # These and subsequent del statements are to prevent confusing
@@ -131,20 +131,20 @@ if have_scipy:  # skip if we cannot read .mat files
             spm2.save(img, sifn)
             re_img2 = nils.load(sifn)
-            assert_true(isinstance(re_img2, spm2.Spm2AnalyzeImage))
+            assert isinstance(re_img2, spm2.Spm2AnalyzeImage)
             assert_array_equal(re_img2.get_fdata(), data)
             assert_array_equal(re_img2.affine, affine)
             del re_img2
             spm99.save(img, sifn)
             re_img3 = nils.load(sifn)
-            assert_true(isinstance(re_img3,
-                                   spm99.Spm99AnalyzeImage))
+            assert isinstance(re_img3,
+                              spm99.Spm99AnalyzeImage)
             assert_array_equal(re_img3.get_fdata(), data)
             assert_array_equal(re_img3.affine, affine)
             ni1.save(re_img3, nifn)
             del re_img3
         re_img = nils.load(nifn)
-        assert_true(isinstance(re_img, ni1.Nifti1Image))
+        assert isinstance(re_img, ni1.Nifti1Image)
         assert_array_equal(re_img.get_fdata(), data)
         assert_array_equal(re_img.affine, affine)
         del re_img
@@ -159,13 +159,13 @@ def test_two_to_one():
     affine[:3, 3] = [3, 2, 1]
     # single file format
     img = ni1.Nifti1Image(data, affine)
-    assert_equal(img.header['magic'], b'n+1')
+    assert img.header['magic'] == b'n+1'
     str_io = BytesIO()
     img.file_map['image'].fileobj = str_io
     # check that the single format vox offset stays at zero
     img.to_file_map()
-    assert_equal(img.header['magic'], b'n+1')
-    assert_equal(img.header['vox_offset'], 0)
+    assert img.header['magic'] == b'n+1'
+    assert img.header['vox_offset'] == 0
     # make a new pair image, with the single image header
     pimg = ni1.Nifti1Pair(data, affine, img.header)
     isio = BytesIO()
@@ -174,32 +174,32
@@ def test_two_to_one(): pimg.file_map['header'].fileobj = hsio pimg.to_file_map() # the offset stays at zero (but is 352 on disk) - assert_equal(pimg.header['magic'], b'ni1') - assert_equal(pimg.header['vox_offset'], 0) + assert pimg.header['magic'] == b'ni1' + assert pimg.header['vox_offset'] == 0 assert_array_equal(pimg.get_fdata(), data) # same for from_image, going from single image to pair format ana_img = ana.AnalyzeImage.from_image(img) - assert_equal(ana_img.header['vox_offset'], 0) + assert ana_img.header['vox_offset'] == 0 # back to the single image, save it again to a stringio str_io = BytesIO() img.file_map['image'].fileobj = str_io img.to_file_map() - assert_equal(img.header['vox_offset'], 0) + assert img.header['vox_offset'] == 0 aimg = ana.AnalyzeImage.from_image(img) - assert_equal(aimg.header['vox_offset'], 0) + assert aimg.header['vox_offset'] == 0 aimg = spm99.Spm99AnalyzeImage.from_image(img) - assert_equal(aimg.header['vox_offset'], 0) + assert aimg.header['vox_offset'] == 0 aimg = spm2.Spm2AnalyzeImage.from_image(img) - assert_equal(aimg.header['vox_offset'], 0) + assert aimg.header['vox_offset'] == 0 nfimg = ni1.Nifti1Pair.from_image(img) - assert_equal(nfimg.header['vox_offset'], 0) + assert nfimg.header['vox_offset'] == 0 # now set the vox offset directly hdr = nfimg.header hdr['vox_offset'] = 16 - assert_equal(nfimg.header['vox_offset'], 16) + assert nfimg.header['vox_offset'] == 16 # check it gets properly set by the nifti single image nfimg = ni1.Nifti1Image.from_image(img) - assert_equal(nfimg.header['vox_offset'], 0) + assert nfimg.header['vox_offset'] == 0 def test_negative_load_save(): @@ -260,7 +260,7 @@ def test_filename_save(): nils.save(img, path) rt_img = nils.load(path) assert_array_almost_equal(rt_img.get_fdata(), data) - assert_true(type(rt_img) is loadklass) + assert type(rt_img) is loadklass # delete image to allow file close. 
Otherwise windows
         # raises an error when trying to delete the directory
         del rt_img
@@ -274,56 +274,56 @@ def test_analyze_detection():
     def wat(hdr):
         return nils.which_analyze_type(hdr.binaryblock)
     n1_hdr = Nifti1Header(b'\0' * 348, check=False)
-    assert_equal(wat(n1_hdr), None)
+    assert wat(n1_hdr) is None
     n1_hdr['sizeof_hdr'] = 540
-    assert_equal(wat(n1_hdr), 'nifti2')
-    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti2')
+    assert wat(n1_hdr) == 'nifti2'
+    assert wat(n1_hdr.as_byteswapped()) == 'nifti2'
     n1_hdr['sizeof_hdr'] = 348
-    assert_equal(wat(n1_hdr), 'analyze')
-    assert_equal(wat(n1_hdr.as_byteswapped()), 'analyze')
+    assert wat(n1_hdr) == 'analyze'
+    assert wat(n1_hdr.as_byteswapped()) == 'analyze'
     n1_hdr['magic'] = b'n+1'
-    assert_equal(wat(n1_hdr), 'nifti1')
-    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
+    assert wat(n1_hdr) == 'nifti1'
+    assert wat(n1_hdr.as_byteswapped()) == 'nifti1'
    n1_hdr['magic'] = b'ni1'
-    assert_equal(wat(n1_hdr), 'nifti1')
-    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
+    assert wat(n1_hdr) == 'nifti1'
+    assert wat(n1_hdr.as_byteswapped()) == 'nifti1'
     # Doesn't matter what magic is if it's not a nifti1 magic
     n1_hdr['magic'] = b'ni2'
-    assert_equal(wat(n1_hdr), 'analyze')
+    assert wat(n1_hdr) == 'analyze'
     n1_hdr['sizeof_hdr'] = 0
     n1_hdr['magic'] = b''
-    assert_equal(wat(n1_hdr), None)
+    assert wat(n1_hdr) is None
     n1_hdr['magic'] = 'n+1'
-    assert_equal(wat(n1_hdr), 'nifti1')
+    assert wat(n1_hdr) == 'nifti1'
     n1_hdr['magic'] = 'ni1'
-    assert_equal(wat(n1_hdr), 'nifti1')
+    assert wat(n1_hdr) == 'nifti1'
 
 
 def test_guessed_image_type():
     # Test whether we can guess the image type from example files
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'example4d.nii.gz')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'example4d.nii.gz')) ==
         Nifti1Image)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'nifti1.hdr')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'nifti1.hdr')) ==
         Nifti1Pair)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'example_nifti2.nii.gz')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'example_nifti2.nii.gz')) ==
         Nifti2Image)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'nifti2.hdr')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'nifti2.hdr')) ==
         Nifti2Pair)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'tiny.mnc')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'tiny.mnc')) ==
         Minc1Image)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'small.mnc')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'small.mnc')) ==
         Minc2Image)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'test.mgz')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'test.mgz')) ==
         MGHImage)
-    assert_equal(nils.guessed_image_type(
-        pjoin(DATA_PATH, 'analyze.hdr')),
+    assert (nils.guessed_image_type(
+        pjoin(DATA_PATH, 'analyze.hdr')) ==
         Spm2AnalyzeImage)
@@ -333,6 +333,6 @@ def test_fail_save():
     affine = np.eye(4, dtype=np.float32)
     img = SpatialImage(dataobj, affine)
     # Fails because float16 is not supported.
-    with assert_raises(AttributeError):
+    with pytest.raises(AttributeError):
         nils.save(img, 'foo.nii.gz')
     del img
diff --git a/nibabel/tests/test_image_types.py b/nibabel/tests/test_image_types.py
index 3ffc65eead..632e23224d 100644
--- a/nibabel/tests/test_image_types.py
+++ b/nibabel/tests/test_image_types.py
@@ -20,7 +20,6 @@
                   Spm2AnalyzeImage, Spm99AnalyzeImage, MGHImage,
                   all_image_classes)
 
-from nose.tools import assert_true
 
 DATA_PATH = pjoin(dirname(__file__), 'data')
 
@@ -64,7 +63,7 @@ def check_img(img_path, img_klass, sniff_mode, sniff, expect_success,
                                        'sizeof_hdr', 0)
         current_sizeof_hdr = 0 if new_sniff is None else \
             len(new_sniff[0])
-        assert_true(current_sizeof_hdr >= expected_sizeof_hdr, new_msg)
+        assert current_sizeof_hdr >= expected_sizeof_hdr, new_msg
 
         # Check that the image type was recognized.
         new_msg = '%s (%s) image is%s a %s image.' % (
@@ -72,7 +71,7 @@ def check_img(img_path, img_klass, sniff_mode, sniff, expect_success,
             msg,
             '' if is_img else ' not',
             img_klass.__name__)
-        assert_true(is_img, new_msg)
+        assert is_img, new_msg
 
         if sniff_mode == 'vanilla':
             return new_sniff
diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py
index 12232c42e4..8fc0da4908 100644
--- a/nibabel/tests/test_imageclasses.py
+++ b/nibabel/tests/test_imageclasses.py
@@ -15,9 +15,8 @@
 from nibabel import imageclasses
 from nibabel.imageclasses import spatial_axes_first, class_map, ext_map
 
-from nose.tools import (assert_true, assert_false, assert_equal)
-from nibabel.testing import clear_and_catch_warnings
+from nibabel.testing_pytest import clear_and_catch_warnings
 
 DATA_DIR = pjoin(dirname(__file__), 'data')
 
@@ -37,26 +36,26 @@ def test_spatial_axes_first():
     for img_class in (AnalyzeImage, Nifti1Image, Nifti2Image):
         data = np.zeros(shape)
         img = img_class(data, affine)
-        assert_true(spatial_axes_first(img))
+        assert spatial_axes_first(img)
     # True for MINC images < 4D
     for fname in MINC_3DS:
         img = nib.load(pjoin(DATA_DIR, fname))
-        assert_true(len(img.shape) == 3)
-        assert_true(spatial_axes_first(img))
+        assert len(img.shape) == 3
+        assert spatial_axes_first(img)
     # False for MINC images 4D
     for fname in MINC_4DS:
         img = nib.load(pjoin(DATA_DIR, fname))
-        assert_true(len(img.shape) == 4)
-        assert_false(spatial_axes_first(img))
+        assert len(img.shape) == 4
+        assert not spatial_axes_first(img)
 
 
 def test_deprecations():
     with clear_and_catch_warnings(modules=[imageclasses]) as w:
         warnings.filterwarnings('always', category=DeprecationWarning)
         nifti_single = class_map['nifti_single']
-        assert_equal(nifti_single['class'], Nifti1Image)
-        assert_equal(len(w), 1)
+        assert nifti_single['class'] == Nifti1Image
+        assert len(w) == 1
         nifti_ext = ext_map['.nii']
-        assert_equal(nifti_ext, 'nifti_single')
-        assert_equal(len(w), 2)
+        assert nifti_ext == 'nifti_single'
+        assert len(w) == 2
diff --git a/nibabel/tests/test_imageglobals.py b/nibabel/tests/test_imageglobals.py
index f730a4db01..42cbe6fdce 100644
--- a/nibabel/tests/test_imageglobals.py
+++ b/nibabel/tests/test_imageglobals.py
@@ -8,10 +8,6 @@
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
 """ Tests for imageglobals module
 """
-
-from nose.tools import (assert_true, assert_false, assert_raises,
-                        assert_equal, assert_not_equal)
-
 from ..
import imageglobals as igs @@ -19,5 +15,5 @@ def test_errorlevel(): orig_level = igs.error_level for level in (10, 20, 30): with igs.ErrorLevel(level): - assert_equal(igs.error_level, level) - assert_equal(igs.error_level, orig_level) + assert igs.error_level == level + assert igs.error_level == orig_level diff --git a/nibabel/tests/test_keywordonly.py b/nibabel/tests/test_keywordonly.py index 0ef63d9b13..26e21ce02d 100644 --- a/nibabel/tests/test_keywordonly.py +++ b/nibabel/tests/test_keywordonly.py @@ -2,8 +2,7 @@ from ..keywordonly import kw_only_func, kw_only_meth -from nose.tools import assert_equal -from nose.tools import assert_raises +import pytest def test_kw_only_func(): @@ -11,23 +10,28 @@ def test_kw_only_func(): def func(an_arg): "My docstring" return an_arg - assert_equal(func(1), 1) - assert_raises(TypeError, func, 1, 2) + assert func(1) == 1 + with pytest.raises(TypeError): + func(1, 2) dec_func = kw_only_func(1)(func) - assert_equal(dec_func(1), 1) - assert_raises(TypeError, dec_func, 1, 2) - assert_raises(TypeError, dec_func, 1, akeyarg=3) - assert_equal(dec_func.__doc__, 'My docstring') + assert dec_func(1) == 1 + with pytest.raises(TypeError): + dec_func(1, 2) + with pytest.raises(TypeError): + dec_func(1, akeyarg=3) + assert dec_func.__doc__ == 'My docstring' @kw_only_func(1) def kw_func(an_arg, a_kwarg='thing'): "Another docstring" return an_arg, a_kwarg - assert_equal(kw_func(1), (1, 'thing')) - assert_raises(TypeError, kw_func, 1, 2) - assert_equal(kw_func(1, a_kwarg=2), (1, 2)) - assert_raises(TypeError, kw_func, 1, akeyarg=3) - assert_equal(kw_func.__doc__, 'Another docstring') + assert kw_func(1) == (1, 'thing') + with pytest.raises(TypeError): + kw_func(1, 2) + assert kw_func(1, a_kwarg=2) == (1, 2) + with pytest.raises(TypeError): + kw_func(1, akeyarg=3) + assert kw_func.__doc__ == 'Another docstring' class C(object): @@ -36,8 +40,10 @@ def kw_meth(self, an_arg, a_kwarg='thing'): "Method docstring" return an_arg, a_kwarg c = C() - assert_equal(c.kw_meth(1), (1, 'thing')) - assert_raises(TypeError, c.kw_meth, 1, 2) - assert_equal(c.kw_meth(1, a_kwarg=2), (1, 2)) - assert_raises(TypeError, c.kw_meth, 1, akeyarg=3) - assert_equal(c.kw_meth.__doc__, 'Method docstring') + assert c.kw_meth(1) == (1, 'thing') + with pytest.raises(TypeError): + c.kw_meth(1, 2) + assert c.kw_meth(1, a_kwarg=2) == (1, 2) + with pytest.raises(TypeError): + c.kw_meth(1, akeyarg=3) + assert c.kw_meth.__doc__ == 'Method docstring' diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 3d7101b6d3..71f0435f1a 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -20,8 +20,7 @@ from numpy.testing import (assert_almost_equal, assert_array_equal) -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) +import pytest data_path = pjoin(dirname(__file__), 'data') @@ -48,7 +47,7 @@ def test_read_img_data(): # These examples have null scaling - assert prefer=unscaled is the same dao = img.dataobj if hasattr(dao, 'slope') and hasattr(img.header, 'raw_data_from_fileobj'): - assert_equal((dao.slope, dao.inter), (1, 0)) + assert (dao.slope, dao.inter) == (1, 0) assert_array_equal(read_img_data(img, prefer='unscaled'), data) # Assert all caps filename works as well with TemporaryDirectory() as tmpdir: @@ -63,15 +62,16 @@ def test_read_img_data(): def test_file_not_found(): - assert_raises(FileNotFoundError, load, 'does_not_exist.nii.gz') + with pytest.raises(FileNotFoundError): + 
load('does_not_exist.nii.gz') def test_load_empty_image(): with InTemporaryDirectory(): open('empty.nii', 'w').close() - with assert_raises(ImageFileError) as err: + with pytest.raises(ImageFileError) as err: load('empty.nii') - assert_true(err.exception.args[0].startswith('Empty file: ')) + assert str(err.value).startswith('Empty file: ') def test_read_img_data_nifti(): @@ -86,13 +86,15 @@ def test_read_img_data_nifti(): img = img_class(data, np.eye(4)) img.set_data_dtype(out_dtype) # No filemap => error - assert_raises(ImageFileError, read_img_data, img) + with pytest.raises(ImageFileError): + read_img_data(img) # Make a filemap froot = 'an_image_{0}'.format(i) img.file_map = img.filespec_to_file_map(froot) # Trying to read from this filemap will generate an error because # we are going to read from files that do not exist - assert_raises(IOError, read_img_data, img) + with pytest.raises(IOError): + read_img_data(img) img.to_file_map() # Load - now the scaling and offset correctly applied img_fname = img.file_map['image'].filename @@ -127,8 +129,8 @@ def test_read_img_data_nifti(): else: new_inter = 0 # scaled scaling comes from new parameters in header - assert_true(np.allclose(actual_unscaled * 2.1 + new_inter, - read_img_data(img_back))) + assert np.allclose(actual_unscaled * 2.1 + new_inter, + read_img_data(img_back)) # Unscaled array didn't change assert_array_equal(actual_unscaled, read_img_data(img_back, prefer='unscaled')) diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index c7b044ed9b..5b53d021c4 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -24,10 +24,10 @@ from ..minc1 import Minc1File, Minc1Image, MincHeader from ..tmpdirs import InTemporaryDirectory -from ..testing import (assert_true, assert_equal, assert_false, assert_raises, assert_warns, - assert_array_equal, data_path, clear_and_catch_warnings) from ..deprecator import ExpiredDeprecationError -from ..testing_pytest import assert_data_similar +from ..testing_pytest import assert_data_similar, data_path, clear_and_catch_warnings +from numpy.testing import assert_array_equal +import pytest from . import test_spatialimages as tsi from .test_fileslice import slicer_samples @@ -107,14 +107,13 @@ def test_old_namespace(): aff = np.diag([2, 3, 4, 1]) from .. 
import Minc1Image, MincImage
-    assert_false(Minc1Image is MincImage)
-    with assert_raises(ExpiredDeprecationError):
+    assert Minc1Image is not MincImage
+    with pytest.raises(ExpiredDeprecationError):
         MincImage(arr, aff)
-    assert_equal(warns, [])
     # Another old name
     from ..minc1 import MincFile, Minc1File
-    assert_false(MincFile is Minc1File)
-    with assert_raises(ExpiredDeprecationError):
+    assert MincFile is not Minc1File
+    with pytest.raises(ExpiredDeprecationError):
         mf = MincFile(netcdf_file(EG_FNAME))
 
@@ -129,12 +128,12 @@ def test_mincfile(self):
         for tp in self.test_files:
             mnc_obj = self.opener(tp['fname'], 'r')
             mnc = self.file_class(mnc_obj)
-            assert_equal(mnc.get_data_dtype().type, tp['dtype'])
-            assert_equal(mnc.get_data_shape(), tp['shape'])
-            assert_equal(mnc.get_zooms(), tp['zooms'])
+            assert mnc.get_data_dtype().type == tp['dtype']
+            assert mnc.get_data_shape() == tp['shape']
+            assert mnc.get_zooms() == tp['zooms']
             assert_array_equal(mnc.get_affine(), tp['affine'])
             data = mnc.get_scaled_data()
-            assert_equal(data.shape, tp['shape'])
+            assert data.shape == tp['shape']
 
     def test_mincfile_slicing(self):
         # Test slicing and scaling of mincfile data
@@ -158,7 +157,7 @@ def test_load(self):
         for tp in self.test_files:
             img = load(tp['fname'])
             data = img.get_fdata()
-            assert_equal(data.shape, tp['shape'])
+            assert data.shape == tp['shape']
             # min, max, mean values from read in SPM2 / minctools
             assert_data_similar(data, tp)
             # check if mnc can be converted to nifti
@@ -172,7 +171,7 @@ def test_array_proxy_slicing(self):
             img = load(tp['fname'])
             arr = img.get_fdata()
             prox = img.dataobj
-            assert_true(prox.is_proxy)
+            assert prox.is_proxy
             for sliceobj in slicer_samples(img.shape):
                 assert_array_equal(arr[sliceobj], prox[sliceobj])
 
@@ -202,8 +201,10 @@ def test_header_data_io():
     bio = BytesIO()
     hdr = MincHeader()
     arr = np.arange(24).reshape((2, 3, 4))
-    assert_raises(NotImplementedError, hdr.data_to_fileobj, arr, bio)
-    assert_raises(NotImplementedError, hdr.data_from_fileobj, bio)
+    with pytest.raises(NotImplementedError):
+        hdr.data_to_fileobj(arr, bio)
+    with pytest.raises(NotImplementedError):
+        hdr.data_from_fileobj(bio)
 
 
 class TestMinc1Image(tsi.TestSpatialImage):
@@ -217,7 +218,7 @@ def test_data_to_from_fileobj(self):
         img = self.module.load(fpath)
         bio = BytesIO()
         arr = np.arange(24).reshape((2, 3, 4))
-        assert_raises(NotImplementedError,
-                      img.header.data_to_fileobj, arr, bio)
-        assert_raises(NotImplementedError,
-                      img.header.data_from_fileobj, bio)
+        with pytest.raises(NotImplementedError):
+            img.header.data_to_fileobj(arr, bio)
+        with pytest.raises(NotImplementedError):
+            img.header.data_from_fileobj(bio)
diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py
index f4367cbc11..5032f01480 100644
--- a/nibabel/tests/test_minc2.py
+++ b/nibabel/tests/test_minc2.py
@@ -15,9 +15,7 @@
 from ..minc2 import Minc2File, Minc2Image
 from .._h5py_compat import h5py, have_h5py, setup_module
 
-from nose.tools import (assert_true, assert_equal, assert_false, assert_raises)
-
-from ..testing import data_path
+from ..testing_pytest import data_path
 
 from . import test_minc1 as tm2
 
diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py
index ebfafa938f..6d5a4b0e35 100644
--- a/nibabel/tests/test_minc2_data.py
+++ b/nibabel/tests/test_minc2_data.py
@@ -19,7 +19,6 @@
 from .nibabel_data import get_nibabel_data, needs_nibabel_data
 from ..
import load as top_load, Nifti1Image
 
-from nose.tools import assert_equal
 from numpy.testing import (assert_array_equal, assert_almost_equal)
 
 MINC2_PATH = pjoin(get_nibabel_data(), 'nitest-minc2')
@@ -58,14 +57,14 @@ class TestEPIFrame(object):
     def test_load(self):
         # Check highest level load of minc works
         img = self.opener(self.example_params['fname'])
-        assert_equal(img.shape, self.example_params['shape'])
+        assert img.shape == self.example_params['shape']
         assert_almost_equal(img.header.get_zooms(),
                             self.example_params['zooms'], 5)
         assert_almost_equal(img.affine, self.example_params['affine'], 4)
-        assert_equal(img.get_data_dtype().type, self.example_params['type'])
+        assert img.get_data_dtype().type == self.example_params['type']
         # Check correspondence of data and recorded shape
         data = img.get_fdata()
-        assert_equal(data.shape, self.example_params['shape'])
+        assert data.shape == self.example_params['shape']
         # min, max, mean values from read in SPM2
         assert_almost_equal(data.min(), self.example_params['min'], 4)
         assert_almost_equal(data.max(), self.example_params['max'], 4)
diff --git a/nibabel/tests/test_mriutils.py b/nibabel/tests/test_mriutils.py
index 527afc61ba..8c6b198c95 100644
--- a/nibabel/tests/test_mriutils.py
+++ b/nibabel/tests/test_mriutils.py
@@ -10,12 +10,8 @@
 """
 
-from numpy.testing import (assert_almost_equal,
-                           assert_array_equal)
-
-from nose.tools import (assert_true, assert_false, assert_raises,
-                        assert_equal, assert_not_equal)
-
+from numpy.testing import assert_almost_equal
+import pytest
 from ..mriutils import calculate_dwell_time, MRIError
 
 
@@ -28,5 +24,7 @@ def test_calculate_dwell_time():
                         3.3 / (42.576 * 3.4 * 3 * 3))
     # Echo train length of 1 is valid, but returns 0 dwell time
     assert_almost_equal(calculate_dwell_time(3.3, 1, 3), 0)
-    assert_raises(MRIError, calculate_dwell_time, 3.3, 0, 3.0)
-    assert_raises(MRIError, calculate_dwell_time, 3.3, 2, -0.1)
+    with pytest.raises(MRIError):
+        calculate_dwell_time(3.3, 0, 3.0)
+    with pytest.raises(MRIError):
+        calculate_dwell_time(3.3, 2, -0.1)

From ab2f5dcc09e6b52f4b0cd7936862349bf51b46ac Mon Sep 17 00:00:00 2001
From: Dorota Jarecka
Date: Fri, 15 Nov 2019 20:01:52 -0500
Subject: [PATCH 454/689] converting more tests from nose to pytest

---
 .travis.yml                        |   1 +
 nibabel/optpkg.py                  |   1 +
 nibabel/tests/test_nibabel_data.py |   9 +-
 nibabel/tests/test_nifti1.py       | 690 +++++++++++++++--------------
 nibabel/tests/test_nifti2.py       |  13 +-
 nibabel/tests/test_openers.py      | 109 ++---
 nibabel/tests/test_optpkg.py       |  26 +-
 nibabel/tests/test_orientations.py |  99 ++---
 nibabel/tests/test_parrec.py       | 228 +++++-----
 nibabel/tests/test_parrec_data.py  |  20 +-
 nibabel/tests/test_pkg_info.py     |  92 ++--
 nibabel/tests/test_processing.py   |  77 ++--
 nibabel/tests/test_proxy_api.py    |  43 +-
 nibabel/tests/test_quaternions.py  | 132 +++---
 14 files changed, 797 insertions(+), 743 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 43ddf9dde1..f4fff97719 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -133,6 +133,7 @@ script:
     pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py
     pytest -v ../nibabel/tests/test_e*.py ../nibabel/tests/test_f*.py ../nibabel/tests/test_h*.py
     pytest -v ../nibabel/tests/test_i*.py ../nibabel/tests/test_k*.py ../nibabel/tests/test_l*.py ../nibabel/tests/test_m*.py
+    pytest -v ../nibabel/tests/test_n*.py ../nibabel/tests/test_o*.py ../nibabel/tests/test_p*.py ../nibabel/tests/test_q*.py
     pytest -v ../nibabel/tests/test_w*.py
   else
     false
diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py
index d52329f186..f87f64da9f 100644
--- a/nibabel/optpkg.py
+++ b/nibabel/optpkg.py
@@ -115,6 +115,7 @@ def optional_package(name, trip_msg=None, min_version=None):
                     % (name, name, exc))
     pkg = TripWire(trip_msg)
 
+    # TODO dj: no clue why it is needed...
     def setup_module():
         if have_nose:
             import nose
diff --git a/nibabel/tests/test_nibabel_data.py b/nibabel/tests/test_nibabel_data.py
index f804f7499f..86e94f5c34 100644
--- a/nibabel/tests/test_nibabel_data.py
+++ b/nibabel/tests/test_nibabel_data.py
@@ -6,7 +6,6 @@
 
 from . import nibabel_data as nibd
 
-from nose.tools import assert_equal
 
 MY_DIR = dirname(__file__)
 
@@ -23,10 +22,10 @@ def test_get_nibabel_data():
     # Test getting directory
     local_data = realpath(pjoin(MY_DIR, '..', '..', 'nibabel-data'))
     if isdir(local_data):
-        assert_equal(nibd.get_nibabel_data(), local_data)
+        assert nibd.get_nibabel_data() == local_data
     else:
-        assert_equal(nibd.get_nibabel_data(), '')
+        assert nibd.get_nibabel_data() == ''
     nibd.environ['NIBABEL_DATA_DIR'] = 'not_a_path'
-    assert_equal(nibd.get_nibabel_data(), '')
+    assert nibd.get_nibabel_data() == ''
     nibd.environ['NIBABEL_DATA_DIR'] = MY_DIR
-    assert_equal(nibd.get_nibabel_data(), MY_DIR)
+    assert nibd.get_nibabel_data() == MY_DIR
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index dd98972df5..ef663f6e7b 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -34,15 +34,14 @@
 from numpy.testing import (assert_array_equal, assert_array_almost_equal,
                            assert_almost_equal)
 
-from nose.tools import (assert_true, assert_false, assert_equal,
-                        assert_raises)
-from ..testing import (
+from ..testing_pytest import (
     clear_and_catch_warnings,
     data_path,
     runif_extra_has,
     suppress_warnings,
 )
+import pytest
 
 from . import test_analyze as tana
 from .
import test_spm99analyze as tspm @@ -61,7 +60,43 @@ A[:3, :3] = np.array(R) * Z # broadcasting does the job A[:3, 3] = T - +HDE = HeaderDataError +header_examples_list = \ + [ + ((None, None), None, (None, None), (np.nan, np.nan)), + ((np.nan, None), None, (None, None), (np.nan, np.nan)), + ((None, np.nan), None, (None, None), (np.nan, np.nan)), + ((np.nan, np.nan), None, (None, None), (np.nan, np.nan)), + # Can only be one null + ((None, 0), HDE, (None, None), (np.nan, 0)), + ((np.nan, 0), HDE, (None, None), (np.nan, 0)), + ((1, None), HDE, (None, None), (1, np.nan)), + ((1, np.nan), HDE, (None, None), (1, np.nan)), + # Bad slope plus anything generates an error + ((0, 0), HDE, (None, None), (0, 0)), + ((0, None), HDE, (None, None), (0, np.nan)), + ((0, np.nan), HDE, (None, None), (0, np.nan)), + ((0, np.inf), HDE, (None, None), (0, np.inf)), + ((0, -np.inf), HDE, (None, None), (0, -np.inf)), + ((np.inf, 0), HDE, (None, None), (np.inf, 0)), + ((np.inf, None), HDE, (None, None), (np.inf, np.nan)), + ((np.inf, np.nan), HDE, (None, None), (np.inf, np.nan)), + ((np.inf, np.inf), HDE, (None, None), (np.inf, np.inf)), + ((np.inf, -np.inf), HDE, (None, None), (np.inf, -np.inf)), + ((-np.inf, 0), HDE, (None, None), (-np.inf, 0)), + ((-np.inf, None), HDE, (None, None), (-np.inf, np.nan)), + ((-np.inf, np.nan), HDE, (None, None), (-np.inf, np.nan)), + ((-np.inf, np.inf), HDE, (None, None), (-np.inf, np.inf)), + ((-np.inf, -np.inf), HDE, (None, None), (-np.inf, -np.inf)), + # Good slope and bad inter generates error for get_slope_inter + ((2, None), HDE, HDE, (2, np.nan)), + ((2, np.nan), HDE, HDE, (2, np.nan)), + ((2, np.inf), HDE, HDE, (2, np.inf)), + ((2, -np.inf), HDE, HDE, (2, -np.inf)), + # Good slope and inter - you guessed it + ((2, 0), None, (2, 0), (2, 0)), + ((2, 1), None, (2, 1), (2, 1)) + ] class TestNifti1PairHeader(tana.TestAnalyzeHeader, tspm.HeaderScalingMixin): header_class = Nifti1PairHeader example_file = header_file @@ -82,15 +117,15 @@ class TestNifti1PairHeader(tana.TestAnalyzeHeader, tspm.HeaderScalingMixin): def test_empty(self): tana.TestAnalyzeHeader.test_empty(self) hdr = self.header_class() - assert_equal(hdr['magic'], hdr.pair_magic) - assert_equal(hdr['scl_slope'], 1) - assert_equal(hdr['vox_offset'], 0) + assert hdr['magic'] == hdr.pair_magic + assert hdr['scl_slope'] == 1 + assert hdr['vox_offset'] == 0 def test_from_eg_file(self): hdr = self.header_class.from_fileobj(open(self.example_file, 'rb')) - assert_equal(hdr.endianness, '<') - assert_equal(hdr['magic'], hdr.pair_magic) - assert_equal(hdr['sizeof_hdr'], self.sizeof_hdr) + assert hdr.endianness == '<' + assert hdr['magic'] == hdr.pair_magic + assert hdr['sizeof_hdr'] == self.sizeof_hdr def test_data_scaling(self): # Test scaling in header @@ -109,7 +144,7 @@ def test_data_scaling(self): hdr.set_data_dtype(np.int8) hdr.set_slope_inter(1, 0) hdr.data_to_fileobj(data, S, rescale=True) - assert_false(np.allclose(hdr.get_slope_inter(), (1, 0))) + assert not np.allclose(hdr.get_slope_inter(), (1, 0)) rdata = hdr.data_from_fileobj(S) assert_array_almost_equal(data, rdata) # Without scaling does rounding, doesn't alter scaling @@ -133,62 +168,29 @@ def test_big_scaling(self): data = np.array([finf['min'], finf['max']], dtype=dtt)[:, None, None] hdr.data_to_fileobj(data, sio) data_back = hdr.data_from_fileobj(sio) - assert_true(np.allclose(data, data_back)) + assert np.allclose(data, data_back) def test_slope_inter(self): hdr = self.header_class() - nan, inf, minf = np.nan, np.inf, -np.inf - HDE = HeaderDataError - 
assert_equal(hdr.get_slope_inter(), (1.0, 0.0)) - for in_tup, exp_err, out_tup, raw_values in ( + assert hdr.get_slope_inter() == (1.0, 0.0) + for in_tup, exp_err, out_tup, raw_values in header_examples_list: # Null scalings - ((None, None), None, (None, None), (nan, nan)), - ((nan, None), None, (None, None), (nan, nan)), - ((None, nan), None, (None, None), (nan, nan)), - ((nan, nan), None, (None, None), (nan, nan)), - # Can only be one null - ((None, 0), HDE, (None, None), (nan, 0)), - ((nan, 0), HDE, (None, None), (nan, 0)), - ((1, None), HDE, (None, None), (1, nan)), - ((1, nan), HDE, (None, None), (1, nan)), - # Bad slope plus anything generates an error - ((0, 0), HDE, (None, None), (0, 0)), - ((0, None), HDE, (None, None), (0, nan)), - ((0, nan), HDE, (None, None), (0, nan)), - ((0, inf), HDE, (None, None), (0, inf)), - ((0, minf), HDE, (None, None), (0, minf)), - ((inf, 0), HDE, (None, None), (inf, 0)), - ((inf, None), HDE, (None, None), (inf, nan)), - ((inf, nan), HDE, (None, None), (inf, nan)), - ((inf, inf), HDE, (None, None), (inf, inf)), - ((inf, minf), HDE, (None, None), (inf, minf)), - ((minf, 0), HDE, (None, None), (minf, 0)), - ((minf, None), HDE, (None, None), (minf, nan)), - ((minf, nan), HDE, (None, None), (minf, nan)), - ((minf, inf), HDE, (None, None), (minf, inf)), - ((minf, minf), HDE, (None, None), (minf, minf)), - # Good slope and bad inter generates error for get_slope_inter - ((2, None), HDE, HDE, (2, nan)), - ((2, nan), HDE, HDE, (2, nan)), - ((2, inf), HDE, HDE, (2, inf)), - ((2, minf), HDE, HDE, (2, minf)), - # Good slope and inter - you guessed it - ((2, 0), None, (2, 0), (2, 0)), - ((2, 1), None, (2, 1), (2, 1))): hdr = self.header_class() if not exp_err is None: - assert_raises(exp_err, hdr.set_slope_inter, *in_tup) + with pytest.raises(exp_err): + hdr.set_slope_inter(*in_tup) in_list = [v if not v is None else np.nan for v in in_tup] hdr['scl_slope'], hdr['scl_inter'] = in_list else: hdr.set_slope_inter(*in_tup) if isinstance(out_tup, Exception): - assert_raises(out_tup, hdr.get_slope_inter) + with pytest.raises(out_tup): + hdr.get_slope_inter() else: - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup # Check set survives through checking hdr = self.header_class.from_header(hdr, check=True) - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup assert_array_equal([hdr['scl_slope'], hdr['scl_inter']], raw_values) @@ -203,8 +205,8 @@ def test_nifti_qfac_checks(self): # 0 is not hdr['pixdim'][0] = 0 fhdr, message, raiser = self.log_chk(hdr, 20) - assert_equal(fhdr['pixdim'][0], 1) - assert_equal(message, + assert fhdr['pixdim'][0] == 1 + assert (message == 'pixdim[0] (qfac) should be 1 ' '(default) or -1; setting qfac to 1') @@ -215,14 +217,14 @@ def test_nifti_qsform_checks(self): hdr = HC() hdr['qform_code'] = -1 fhdr, message, raiser = self.log_chk(hdr, 30) - assert_equal(fhdr['qform_code'], 0) - assert_equal(message, + assert fhdr['qform_code'] == 0 + assert (message == 'qform_code -1 not valid; setting to 0') hdr = HC() hdr['sform_code'] = -1 fhdr, message, raiser = self.log_chk(hdr, 30) - assert_equal(fhdr['sform_code'], 0) - assert_equal(message, + assert fhdr['sform_code'] == 0 + assert (message == 'sform_code -1 not valid; setting to 0') def test_nifti_xform_codes(self): @@ -231,14 +233,16 @@ def test_nifti_xform_codes(self): affine = np.eye(4) for code in nifti1.xform_codes.keys(): hdr.set_qform(affine, code) - assert_equal(hdr['qform_code'], nifti1.xform_codes[code]) + assert 
hdr['qform_code'] == nifti1.xform_codes[code] hdr.set_sform(affine, code) - assert_equal(hdr['sform_code'], nifti1.xform_codes[code]) + assert hdr['sform_code'] == nifti1.xform_codes[code] # Raise KeyError on unknown code for bad_code in (-1, 6, 10): - assert_raises(KeyError, hdr.set_qform, affine, bad_code) - assert_raises(KeyError, hdr.set_sform, affine, bad_code) + with pytest.raises(KeyError): + hdr.set_qform(affine, bad_code) + with pytest.raises(KeyError): + hdr.set_sform(affine, bad_code) def test_magic_offset_checks(self): # magic and offset @@ -246,8 +250,8 @@ def test_magic_offset_checks(self): hdr = HC() hdr['magic'] = 'ooh' fhdr, message, raiser = self.log_chk(hdr, 45) - assert_equal(fhdr['magic'], b'ooh') - assert_equal(message, + assert fhdr['magic'] == b'ooh' + assert (message == 'magic string "ooh" is not valid; ' 'leaving as is, but future errors are likely') # For pairs, any offset is OK, but should be divisible by 16 @@ -263,8 +267,8 @@ def test_magic_offset_checks(self): self.assert_no_log_err(hdr) hdr['vox_offset'] = bad_spm fhdr, message, raiser = self.log_chk(hdr, 30) - assert_equal(fhdr['vox_offset'], bad_spm) - assert_equal(message, + assert fhdr['vox_offset'] == bad_spm + assert (message == 'vox offset (={0:g}) not divisible by 16, ' 'not SPM compatible; leaving at current ' 'value'.format(bad_spm)) @@ -272,8 +276,8 @@ def test_magic_offset_checks(self): hdr['magic'] = hdr.single_magic hdr['vox_offset'] = 10 fhdr, message, raiser = self.log_chk(hdr, 40) - assert_equal(fhdr['vox_offset'], hdr.single_vox_offset) - assert_equal(message, + assert fhdr['vox_offset'] == hdr.single_vox_offset + assert (message == 'vox offset 10 too low for single ' 'file nifti1; setting to minimum value ' 'of ' + str(hdr.single_vox_offset)) @@ -285,14 +289,14 @@ def test_freesurfer_large_vector_hack(self): # The standard case hdr = HC() hdr.set_data_shape((2, 3, 4)) - assert_equal(hdr.get_data_shape(), (2, 3, 4)) - assert_equal(hdr['glmin'], 0) + assert hdr.get_data_shape() == (2, 3, 4) + assert hdr['glmin'] == 0 # Just left of the freesurfer case dim_type = hdr.template_dtype['dim'].base glmin = hdr.template_dtype['glmin'].base too_big = int(np.iinfo(dim_type).max) + 1 hdr.set_data_shape((too_big - 1, 1, 1)) - assert_equal(hdr.get_data_shape(), (too_big - 1, 1, 1)) + assert hdr.get_data_shape() == (too_big - 1, 1, 1) # The freesurfer case full_shape = (too_big, 1, 1, 1, 1, 1, 1) for dim in range(3, 8): @@ -300,39 +304,37 @@ def test_freesurfer_large_vector_hack(self): expected_dim = np.array([dim, -1, 1, 1, 1, 1, 1, 1]) with suppress_warnings(): hdr.set_data_shape(full_shape[:dim]) - assert_equal(hdr.get_data_shape(), full_shape[:dim]) + assert hdr.get_data_shape() == full_shape[:dim] assert_array_equal(hdr['dim'], expected_dim) - assert_equal(hdr['glmin'], too_big) + assert hdr['glmin'] == too_big # Allow the fourth dimension to vary with suppress_warnings(): hdr.set_data_shape((too_big, 1, 1, 4)) - assert_equal(hdr.get_data_shape(), (too_big, 1, 1, 4)) + assert hdr.get_data_shape() == (too_big, 1, 1, 4) assert_array_equal(hdr['dim'][:5], np.array([4, -1, 1, 1, 4])) # This only works when the first 3 dimensions are -1, 1, 1 - assert_raises(HeaderDataError, hdr.set_data_shape, (too_big,)) - assert_raises(HeaderDataError, hdr.set_data_shape, (too_big, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (too_big, 1, 2)) - assert_raises(HeaderDataError, hdr.set_data_shape, (too_big, 2, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, too_big)) - 
assert_raises(HeaderDataError, hdr.set_data_shape, (1, too_big, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, 1, too_big)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, 1, 1, too_big)) + for args in [(too_big,), (too_big, 1), (too_big, 1, 2), (too_big, 2, 1), + (1, too_big), (1, too_big, 1), (1, 1, too_big), (1, 1, 1, too_big)]: + with pytest.raises(HeaderDataError): + hdr.set_data_shape(args) # Outside range of glmin raises error far_too_big = int(np.iinfo(glmin).max) + 1 with suppress_warnings(): hdr.set_data_shape((far_too_big - 1, 1, 1)) - assert_equal(hdr.get_data_shape(), (far_too_big - 1, 1, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (far_too_big, 1, 1)) + assert hdr.get_data_shape() == (far_too_big - 1, 1, 1) + with pytest.raises(HeaderDataError): + hdr.set_data_shape((far_too_big, 1, 1)) # glmin of zero raises error (implausible vector length) hdr.set_data_shape((-1, 1, 1)) hdr['glmin'] = 0 - assert_raises(HeaderDataError, hdr.get_data_shape) + with pytest.raises(HeaderDataError): + hdr.get_data_shape() # Lists or tuples or arrays will work for setting shape for shape in ((too_big - 1, 1, 1), (too_big, 1, 1)): for constructor in (list, tuple, np.array): with suppress_warnings(): hdr.set_data_shape(constructor(shape)) - assert_equal(hdr.get_data_shape(), shape) + assert hdr.get_data_shape() == shape @needs_nibabel_data('nitest-freesurfer') def test_freesurfer_ico7_hack(self): @@ -343,24 +345,24 @@ def test_freesurfer_ico7_hack(self): for dim in range(3, 8): expected_dim = np.array([dim, 27307, 1, 6, 1, 1, 1, 1]) hdr.set_data_shape(full_shape[:dim]) - assert_equal(hdr.get_data_shape(), full_shape[:dim]) + assert hdr.get_data_shape() == full_shape[:dim] assert_array_equal(hdr._structarr['dim'], expected_dim) # Only works on dimensions >= 3 - assert_raises(HeaderDataError, hdr.set_data_shape, full_shape[:1]) - assert_raises(HeaderDataError, hdr.set_data_shape, full_shape[:2]) - # Bad shapes - assert_raises(HeaderDataError, hdr.set_data_shape, (163842, 2, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (163842, 1, 2)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, 163842, 1)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, 1, 163842)) - assert_raises(HeaderDataError, hdr.set_data_shape, (1, 1, 1, 163842)) + for args in [ + # Only works on dimensions >= 3 + full_shape[:1], full_shape[:2], + # Bad shapes + (163842, 2, 1), (163842, 1, 2), (1, 163842, 1), + (1, 1, 163842), (1, 1, 1, 163842)]: + with pytest.raises(HeaderDataError): + hdr.set_data_shape(args) # Test consistency of data in .mgh and mri_convert produced .nii nitest_path = os.path.join(get_nibabel_data(), 'nitest-freesurfer') mgh = mghload(os.path.join(nitest_path, 'fsaverage', 'surf', 'lh.orig.avg.area.mgh')) nii = load(os.path.join(nitest_path, 'derivative', 'fsaverage', 'surf', 'lh.orig.avg.area.nii')) - assert_equal(mgh.shape, nii.shape) + assert mgh.shape == nii.shape assert_array_equal(mgh.get_fdata(), nii.get_fdata()) assert_array_equal(nii.header._structarr['dim'][1:4], np.array([27307, 1, 6])) @@ -368,7 +370,7 @@ def test_freesurfer_ico7_hack(self): with InTemporaryDirectory(): nii.to_filename('test.nii') nii2 = load('test.nii') - assert_equal(nii.shape, nii2.shape) + assert nii.shape == nii2.shape assert_array_equal(nii.get_fdata(), nii2.get_fdata()) assert_array_equal(nii.affine, nii2.affine) @@ -379,8 +381,8 @@ def test_qform_sform(self): empty_sform = np.zeros((4, 4)) empty_sform[-1, -1] = 1 assert_array_equal(hdr.get_sform(), empty_sform) - 
assert_equal(hdr.get_qform(coded=True), (None, 0)) - assert_equal(hdr.get_sform(coded=True), (None, 0)) + assert hdr.get_qform(coded=True) == (None, 0) + assert hdr.get_sform(coded=True) == (None, 0) # Affines with no shears nice_aff = np.diag([2, 3, 4, 1]) another_aff = np.diag([3, 4, 5, 1]) @@ -388,39 +390,39 @@ def test_qform_sform(self): nasty_aff = from_matvec(np.arange(9).reshape((3, 3)), [9, 10, 11]) nasty_aff[0, 0] = 1 # Make full rank fixed_aff = unshear_44(nasty_aff) - assert_false(np.allclose(fixed_aff, nasty_aff)) + assert not np.allclose(fixed_aff, nasty_aff) for in_meth, out_meth in ((hdr.set_qform, hdr.get_qform), (hdr.set_sform, hdr.get_sform)): in_meth(nice_aff, 2) aff, code = out_meth(coded=True) assert_array_equal(aff, nice_aff) - assert_equal(code, 2) + assert code == 2 assert_array_equal(out_meth(), nice_aff) # non coded # Affine may be passed if code == 0, and will get set into header, # but the returned affine with 'coded=True' will be None. in_meth(another_aff, 0) - assert_equal(out_meth(coded=True), (None, 0)) # coded -> None + assert out_meth(coded=True) == (None, 0) # coded -> None assert_array_almost_equal(out_meth(), another_aff) # else -> input # Default qform code when previous == 0 is 2 in_meth(nice_aff) aff, code = out_meth(coded=True) - assert_equal(code, 2) + assert code == 2 # Unless code was non-zero before in_meth(nice_aff, 1) in_meth(nice_aff) aff, code = out_meth(coded=True) - assert_equal(code, 1) + assert code == 1 # Can set code without modifying affine, by passing affine=None assert_array_equal(aff, nice_aff) # affine same as before in_meth(None, 3) aff, code = out_meth(coded=True) assert_array_equal(aff, nice_aff) # affine same as before - assert_equal(code, 3) + assert code == 3 # affine is None on its own, or with code==0, resets code to 0 in_meth(None, 0) - assert_equal(out_meth(coded=True), (None, 0)) + assert out_meth(coded=True) == (None, 0) in_meth(None) - assert_equal(out_meth(coded=True), (None, 0)) + assert out_meth(coded=True) == (None, 0) # List works as input in_meth(nice_aff.tolist()) assert_array_equal(out_meth(), nice_aff) @@ -429,17 +431,18 @@ def test_qform_sform(self): hdr.set_qform(nasty_aff, 1) assert_array_almost_equal(hdr.get_qform(), fixed_aff) # Unless allow_shears is False - assert_raises(HeaderDataError, hdr.set_qform, nasty_aff, 1, False) + with pytest.raises(HeaderDataError): + hdr.set_qform(nasty_aff, 1, False) # Reset sform, give qform a code, to test sform hdr.set_sform(None) hdr.set_qform(nice_aff, 1) # Check sform unchanged by setting qform - assert_equal(hdr.get_sform(coded=True), (None, 0)) + assert hdr.get_sform(coded=True) == (None, 0) # Setting does change the sform ouput hdr.set_sform(nasty_aff, 1) aff, code = hdr.get_sform(coded=True) assert_array_equal(aff, nasty_aff) - assert_equal(code, 1) + assert code == 1 def test_datatypes(self): hdr = self.header_class() @@ -448,9 +451,7 @@ def test_datatypes(self): if dt == np.void: continue hdr.set_data_dtype(code) - (assert_equal, - hdr.get_data_dtype(), - data_type_codes.dtype[code]) + assert hdr.get_data_dtype(), data_type_codes.dtype[code] # Check that checks also see new datatypes hdr.set_data_dtype(np.complex128) hdr.check_fix() @@ -460,11 +461,11 @@ def test_quaternion(self): hdr['quatern_b'] = 0 hdr['quatern_c'] = 0 hdr['quatern_d'] = 0 - assert_true(np.allclose(hdr.get_qform_quaternion(), [1.0, 0, 0, 0])) + assert np.allclose(hdr.get_qform_quaternion(), [1.0, 0, 0, 0]) hdr['quatern_b'] = 1 hdr['quatern_c'] = 0 hdr['quatern_d'] = 0 - 
assert_true(np.allclose(hdr.get_qform_quaternion(), [0, 1, 0, 0])) + assert np.allclose(hdr.get_qform_quaternion(), [0, 1, 0, 0]) # Check threshold set correctly for float32 hdr['quatern_b'] = 1 + np.finfo(self.quat_dtype).eps assert_array_almost_equal(hdr.get_qform_quaternion(), [0, 1, 0, 0]) @@ -474,35 +475,36 @@ def test_qform(self): ehdr = self.header_class() ehdr.set_qform(A) qA = ehdr.get_qform() - assert_true, np.allclose(A, qA, atol=1e-5) - assert_true, np.allclose(Z, ehdr['pixdim'][1:4]) + assert np.allclose(A, qA, atol=1e-5) + assert np.allclose(Z, ehdr['pixdim'][1:4]) xfas = nifti1.xform_codes - assert_true, ehdr['qform_code'] == xfas['aligned'] + assert ehdr['qform_code'] == xfas['aligned'] ehdr.set_qform(A, 'scanner') - assert_true, ehdr['qform_code'] == xfas['scanner'] + assert ehdr['qform_code'] == xfas['scanner'] ehdr.set_qform(A, xfas['aligned']) - assert_true, ehdr['qform_code'] == xfas['aligned'] + assert ehdr['qform_code'] == xfas['aligned'] # Test pixdims[1,2,3] are checked for negatives for dims in ((-1, 1, 1), (1, -1, 1), (1, 1, -1)): ehdr['pixdim'][1:4] = dims - assert_raises(HeaderDataError, ehdr.get_qform) + with pytest.raises(HeaderDataError): + ehdr.get_qform() def test_sform(self): # Test roundtrip case ehdr = self.header_class() ehdr.set_sform(A) sA = ehdr.get_sform() - assert_true, np.allclose(A, sA, atol=1e-5) + assert np.allclose(A, sA, atol=1e-5) xfas = nifti1.xform_codes - assert_true, ehdr['sform_code'] == xfas['aligned'] + assert ehdr['sform_code'] == xfas['aligned'] ehdr.set_sform(A, 'scanner') - assert_true, ehdr['sform_code'] == xfas['scanner'] + assert ehdr['sform_code'] == xfas['scanner'] ehdr.set_sform(A, xfas['aligned']) - assert_true, ehdr['sform_code'] == xfas['aligned'] + assert ehdr['sform_code'] == xfas['aligned'] def test_dim_info(self): ehdr = self.header_class() - assert_true(ehdr.get_dim_info() == (None, None, None)) + assert ehdr.get_dim_info() == (None, None, None) for info in ((0, 2, 1), (None, None, None), (0, 2, None), @@ -511,18 +513,21 @@ def test_dim_info(self): (None, None, 1), ): ehdr.set_dim_info(*info) - assert_true(ehdr.get_dim_info() == info) + assert ehdr.get_dim_info() == info def test_slice_times(self): hdr = self.header_class() # error if slice dimension not specified - assert_raises(HeaderDataError, hdr.get_slice_times) + with pytest.raises(HeaderDataError): + hdr.get_slice_times() hdr.set_dim_info(slice=2) # error if slice dimension outside shape - assert_raises(HeaderDataError, hdr.get_slice_times) + with pytest.raises(HeaderDataError): + hdr.get_slice_times() hdr.set_data_shape((1, 1, 7)) # error if slice duration not set - assert_raises(HeaderDataError, hdr.get_slice_times) + with pytest.raises(HeaderDataError): + hdr.get_slice_times() hdr.set_slice_duration(0.1) # We need a function to print out the Nones and floating point # values in a predictable way, for the tests below. @@ -530,51 +535,56 @@ def test_slice_times(self): _print_me = lambda s: list(map(_stringer, s)) # The following examples are from the nifti1.h documentation. 
hdr['slice_code'] = slice_order_codes['sequential increasing'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == ['0.0', '0.1', '0.2', '0.3', '0.4', '0.5', '0.6']) hdr['slice_start'] = 1 hdr['slice_end'] = 5 - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.0', '0.1', '0.2', '0.3', '0.4', None]) hdr['slice_code'] = slice_order_codes['sequential decreasing'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.4', '0.3', '0.2', '0.1', '0.0', None]) hdr['slice_code'] = slice_order_codes['alternating increasing'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.0', '0.3', '0.1', '0.4', '0.2', None]) hdr['slice_code'] = slice_order_codes['alternating decreasing'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.2', '0.4', '0.1', '0.3', '0.0', None]) hdr['slice_code'] = slice_order_codes['alternating increasing 2'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.2', '0.0', '0.3', '0.1', '0.4', None]) hdr['slice_code'] = slice_order_codes['alternating decreasing 2'] - assert_equal(_print_me(hdr.get_slice_times()), + assert (_print_me(hdr.get_slice_times()) == [None, '0.4', '0.1', '0.3', '0.0', '0.2', None]) # test set hdr = self.header_class() hdr.set_dim_info(slice=2) # need slice dim to correspond with shape times = [None, 0.2, 0.4, 0.1, 0.3, 0.0, None] - assert_raises(HeaderDataError, hdr.set_slice_times, times) + with pytest.raises(HeaderDataError): + hdr.set_slice_times(times) hdr.set_data_shape([1, 1, 7]) - assert_raises(HeaderDataError, hdr.set_slice_times, - times[:-1]) # wrong length - assert_raises(HeaderDataError, hdr.set_slice_times, - (None,) * len(times)) # all None + with pytest.raises(HeaderDataError): + # wrong length + hdr.set_slice_times(times[:-1]) + with pytest.raises(HeaderDataError): + # all None + hdr.set_slice_times((None,) * len(times)) n_mid_times = times[:] n_mid_times[3] = None - assert_raises(HeaderDataError, hdr.set_slice_times, - n_mid_times) # None in middle + with pytest.raises(HeaderDataError): + # None in middle + hdr.set_slice_times(n_mid_times) funny_times = times[:] funny_times[3] = 0.05 - assert_raises(HeaderDataError, hdr.set_slice_times, - funny_times) # can't get single slice duration + with pytest.raises(HeaderDataError): + # can't get single slice duration + hdr.set_slice_times(funny_times) hdr.set_slice_times(times) - assert_equal(hdr.get_value_label('slice_code'), + assert (hdr.get_value_label('slice_code') == 'alternating decreasing') - assert_equal(hdr['slice_start'], 1) - assert_equal(hdr['slice_end'], 5) + assert hdr['slice_start'] == 1 + assert hdr['slice_end'] == 5 assert_array_almost_equal(hdr['slice_duration'], 0.1) # Ambiguous case @@ -587,121 +597,119 @@ def test_slice_times(self): hdr2.set_slice_times([0.1, 0]) assert len(w) == 1 # but always must be choosing sequential one first - assert_equal(hdr2.get_value_label('slice_code'), 'sequential decreasing') + assert hdr2.get_value_label('slice_code') == 'sequential decreasing' # and the other direction hdr2.set_slice_times([0, 0.1]) - assert_equal(hdr2.get_value_label('slice_code'), 'sequential increasing') + assert hdr2.get_value_label('slice_code') == 'sequential increasing' def test_intents(self): ehdr = self.header_class() ehdr.set_intent('t test', (10,), name='some 
score') - assert_equal(ehdr.get_intent(), + assert (ehdr.get_intent() == ('t test', (10.0,), 'some score')) # unknown intent name or code - unknown name will fail even when # allow_unknown=True - assert_raises(KeyError, ehdr.set_intent, 'no intention') - assert_raises(KeyError, ehdr.set_intent, 'no intention', - allow_unknown=True) - assert_raises(KeyError, ehdr.set_intent, 32767) + with pytest.raises(KeyError): + ehdr.set_intent('no intention') + with pytest.raises(KeyError): + ehdr.set_intent('no intention', allow_unknown=True) + with pytest.raises(KeyError): + ehdr.set_intent(32767) # too many parameters - assert_raises(HeaderDataError, ehdr.set_intent, 't test', (10, 10)) + with pytest.raises(HeaderDataError): + ehdr.set_intent('t test', (10, 10)) # too few parameters - assert_raises(HeaderDataError, ehdr.set_intent, 'f test', (10,)) + with pytest.raises(HeaderDataError): + ehdr.set_intent('f test', (10,)) # check unset parameters are set to 0, and name to '' ehdr.set_intent('t test') - assert_equal((ehdr['intent_p1'], ehdr['intent_p2'], ehdr['intent_p3']), + assert ((ehdr['intent_p1'], ehdr['intent_p2'], ehdr['intent_p3']) == (0, 0, 0)) - assert_equal(ehdr['intent_name'], b'') + assert ehdr['intent_name'] == b'' ehdr.set_intent('t test', (10,)) - assert_equal((ehdr['intent_p2'], ehdr['intent_p3']), (0, 0)) + assert (ehdr['intent_p2'], ehdr['intent_p3']) == (0, 0) # store intent that is not in nifti1.intent_codes recoder ehdr.set_intent(9999, allow_unknown=True) - assert_equal(ehdr.get_intent(), ('unknown code 9999', (), '')) - assert_equal(ehdr.get_intent('code'), (9999, (), '')) + assert ehdr.get_intent() == ('unknown code 9999', (), '') + assert ehdr.get_intent('code') == (9999, (), '') ehdr.set_intent(9999, name='custom intent', allow_unknown=True) - assert_equal(ehdr.get_intent(), + assert (ehdr.get_intent() == ('unknown code 9999', (), 'custom intent')) - assert_equal(ehdr.get_intent('code'), (9999, (), 'custom intent')) + assert ehdr.get_intent('code') == (9999, (), 'custom intent') # store unknown intent with parameters. 
set_intent will set the # parameters, but get_intent won't return them ehdr.set_intent(code=9999, params=(1, 2, 3), allow_unknown=True) - assert_equal(ehdr.get_intent(), ('unknown code 9999', (), '')) - assert_equal(ehdr.get_intent('code'), (9999, (), '')) + assert ehdr.get_intent() == ('unknown code 9999', (), '') + assert ehdr.get_intent('code') == (9999, (), '') # unknown intent requires either zero, or three, parameters - assert_raises(HeaderDataError, ehdr.set_intent, 999, (1,), - allow_unknown=True) - assert_raises(HeaderDataError, ehdr.set_intent, 999, (1,2), - allow_unknown=True) + with pytest.raises(HeaderDataError): + ehdr.set_intent(999, (1,), allow_unknown=True) + with pytest.raises(HeaderDataError): + ehdr.set_intent(999, (1,2), allow_unknown=True) def test_set_slice_times(self): hdr = self.header_class() hdr.set_dim_info(slice=2) hdr.set_data_shape([1, 1, 7]) hdr.set_slice_duration(0.1) - times = [0] * 6 - assert_raises(HeaderDataError, hdr.set_slice_times, times) - times = [None] * 7 - assert_raises(HeaderDataError, hdr.set_slice_times, times) - times = [None, 0, 1, None, 3, 4, None] - assert_raises(HeaderDataError, hdr.set_slice_times, times) - times = [None, 0, 1, 2.1, 3, 4, None] - assert_raises(HeaderDataError, hdr.set_slice_times, times) - times = [None, 0, 4, 3, 2, 1, None] - assert_raises(HeaderDataError, hdr.set_slice_times, times) + for times in [[0] * 6, [None] * 7, [None, 0, 1, None, 3, 4, None], + [None, 0, 1, 2.1, 3, 4, None], [None, 0, 4, 3, 2, 1, None]]: + with pytest.raises(HeaderDataError): + hdr.set_slice_times(times) times = [0, 1, 2, 3, 4, 5, 6] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 1) - assert_equal(hdr['slice_start'], 0) - assert_equal(hdr['slice_end'], 6) - assert_equal(hdr['slice_duration'], 1.0) + assert hdr['slice_code'] == 1 + assert hdr['slice_start'] == 0 + assert hdr['slice_end'] == 6 + assert hdr['slice_duration'] == 1.0 times = [None, 0, 1, 2, 3, 4, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 1) - assert_equal(hdr['slice_start'], 1) - assert_equal(hdr['slice_end'], 5) - assert_equal(hdr['slice_duration'], 1.0) + assert hdr['slice_code'] == 1 + assert hdr['slice_start'] == 1 + assert hdr['slice_end'] == 5 + assert hdr['slice_duration'] == 1.0 times = [None, 0.4, 0.3, 0.2, 0.1, 0, None] hdr.set_slice_times(times) - assert_true(np.allclose(hdr['slice_duration'], 0.1)) + assert np.allclose(hdr['slice_duration'], 0.1) times = [None, 4, 3, 2, 1, 0, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 2) + assert hdr['slice_code'] == 2 times = [None, 0, 3, 1, 4, 2, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 3) + assert hdr['slice_code'] == 3 times = [None, 2, 4, 1, 3, 0, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 4) + assert hdr['slice_code'] == 4 times = [None, 2, 0, 3, 1, 4, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 5) + assert hdr['slice_code'] == 5 times = [None, 4, 1, 3, 0, 2, None] hdr.set_slice_times(times) - assert_equal(hdr['slice_code'], 6) + assert hdr['slice_code'] == 6 def test_xyzt_units(self): hdr = self.header_class() - assert_equal(hdr.get_xyzt_units(), ('unknown', 'unknown')) + assert hdr.get_xyzt_units() == ('unknown', 'unknown') hdr.set_xyzt_units('mm', 'sec') - assert_equal(hdr.get_xyzt_units(), ('mm', 'sec')) + assert hdr.get_xyzt_units() == ('mm', 'sec') hdr.set_xyzt_units() - assert_equal(hdr.get_xyzt_units(), ('unknown', 'unknown')) + assert hdr.get_xyzt_units() == ('unknown', 'unknown') def 
test_recoded_fields(self): hdr = self.header_class() - assert_equal(hdr.get_value_label('qform_code'), 'unknown') + assert hdr.get_value_label('qform_code') == 'unknown' hdr['qform_code'] = 3 - assert_equal(hdr.get_value_label('qform_code'), 'talairach') - assert_equal(hdr.get_value_label('sform_code'), 'unknown') + assert hdr.get_value_label('qform_code') == 'talairach' + assert hdr.get_value_label('sform_code') == 'unknown' hdr['sform_code'] = 3 - assert_equal(hdr.get_value_label('sform_code'), 'talairach') - assert_equal(hdr.get_value_label('intent_code'), 'none') + assert hdr.get_value_label('sform_code') == 'talairach' + assert hdr.get_value_label('intent_code') == 'none' hdr.set_intent('t test', (10,), name='some score') - assert_equal(hdr.get_value_label('intent_code'), 't test') - assert_equal(hdr.get_value_label('slice_code'), 'unknown') + assert hdr.get_value_label('intent_code') == 't test' + assert hdr.get_value_label('slice_code') == 'unknown' hdr['slice_code'] = 4 # alternating decreasing - assert_equal(hdr.get_value_label('slice_code'), + assert (hdr.get_value_label('slice_code') == 'alternating decreasing') @@ -721,9 +729,9 @@ class TestNifti1SingleHeader(TestNifti1PairHeader): def test_empty(self): tana.TestAnalyzeHeader.test_empty(self) hdr = self.header_class() - assert_equal(hdr['magic'], hdr.single_magic) - assert_equal(hdr['scl_slope'], 1) - assert_equal(hdr['vox_offset'], 0) + assert hdr['magic'] == hdr.single_magic + assert hdr['scl_slope'] == 1 + assert hdr['vox_offset'] == 0 def test_binblock_is_file(self): # Override test that binary string is the same as the file on disk; in @@ -732,7 +740,7 @@ def test_binblock_is_file(self): hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) - assert_equal(str_io.getvalue(), hdr.binaryblock + b'\x00' * 4) + assert str_io.getvalue() == hdr.binaryblock + b'\x00' * 4 def test_float128(self): hdr = self.header_class() @@ -740,9 +748,10 @@ def test_float128(self): ld_dt = np.dtype(np.longdouble) if have_binary128() or ld_dt == np.dtype(np.float64): hdr.set_data_dtype(np.longdouble) - assert_equal(hdr.get_data_dtype(), ld_dt) + assert hdr.get_data_dtype() == ld_dt else: - assert_raises(HeaderDataError, hdr.set_data_dtype, np.longdouble) + with pytest.raises(HeaderDataError): + hdr.set_data_dtype(np.longdouble) class TestNifti1Pair(tana.TestAnalyzeImage, tspm.ImageScalingMixin): @@ -792,13 +801,13 @@ def test_qform_cycle(self): # None affine img = img_klass(np.zeros((2, 3, 4)), None) hdr_back = self._qform_rt(img).header - assert_equal(hdr_back['qform_code'], 3) - assert_equal(hdr_back['sform_code'], 4) + assert hdr_back['qform_code'] == 3 + assert hdr_back['sform_code'] == 4 # Try non-None affine img = img_klass(np.zeros((2, 3, 4)), np.eye(4)) hdr_back = self._qform_rt(img).header - assert_equal(hdr_back['qform_code'], 3) - assert_equal(hdr_back['sform_code'], 4) + assert hdr_back['qform_code'] == 3 + assert hdr_back['sform_code'] == 4 # Modify affine in-place - does it hold? 
img.affine[0, 0] = 9 img.to_file_map() @@ -818,8 +827,8 @@ def test_header_update_affine(self): hdr.set_qform(aff, 2) hdr.set_sform(aff, 2) img.update_header() - assert_equal(hdr['sform_code'], 2) - assert_equal(hdr['qform_code'], 2) + assert hdr['sform_code'] == 2 + assert hdr['qform_code'] == 2 def test_set_qform(self): img = self.image_class(np.zeros((2, 3, 4)), @@ -835,12 +844,12 @@ def test_set_qform(self): # Set qform using new_affine img.set_qform(new_affine, 1) assert_array_almost_equal(img.get_qform(), new_affine) - assert_equal(hdr['qform_code'], 1) + assert hdr['qform_code'] == 1 # Image get is same as header get assert_array_almost_equal(img.get_qform(), new_affine) # Coded version of get gets same information qaff, code = img.get_qform(coded=True) - assert_equal(code, 1) + assert code == 1 assert_array_almost_equal(qaff, new_affine) # Image affine now reset to best affine (which is sform) assert_array_almost_equal(img.affine, hdr.get_best_affine()) @@ -852,7 +861,7 @@ def test_set_qform(self): assert_array_almost_equal(hdr.get_zooms(), [1.1, 1.1, 1.1]) img.set_qform(None) qaff, code = img.get_qform(coded=True) - assert_equal((qaff, code), (None, 0)) + assert (qaff, code) == (None, 0) assert_array_almost_equal(hdr.get_zooms(), [1.1, 1.1, 1.1]) # Best affine similarly assert_array_almost_equal(img.affine, hdr.get_best_affine()) @@ -860,14 +869,16 @@ def test_set_qform(self): img.set_sform(None) img.set_qform(new_affine, 1) qaff, code = img.get_qform(coded=True) - assert_equal(code, 1) + assert code == 1 assert_array_almost_equal(img.affine, new_affine) new_affine[0, 1] = 2 # If affine has shear, should raise Error if strip_shears=False img.set_qform(new_affine, 2) - assert_raises(HeaderDataError, img.set_qform, new_affine, 2, False) + with pytest.raises(HeaderDataError): + img.set_qform(new_affine, 2, False) # Unexpected keyword raises error - assert_raises(TypeError, img.get_qform, strange=True) + with pytest.raises(TypeError): + img.get_qform(strange=True) # updating None affine, None header does not work, because None header # results in setting the sform to default img = self.image_class(np.zeros((2, 3, 4)), None) @@ -889,16 +900,16 @@ def test_set_sform(self): img.affine[:] = aff_affine assert_array_almost_equal(img.affine, aff_affine) # Sform, Qform codes are 'aligned', 'unknown' by default - assert_equal((hdr['sform_code'], hdr['qform_code']), (2, 0)) + assert (hdr['sform_code'], hdr['qform_code']) == (2, 0) # Set sform using new_affine when qform is 0 img.set_sform(new_affine, 1) - assert_equal(hdr['sform_code'], 1) + assert hdr['sform_code'] == 1 assert_array_almost_equal(hdr.get_sform(), new_affine) # Image get is same as header get assert_array_almost_equal(img.get_sform(), new_affine) # Coded version gives same result saff, code = img.get_sform(coded=True) - assert_equal(code, 1) + assert code == 1 assert_array_almost_equal(saff, new_affine) # Because we've reset the sform with update_affine, the affine changes assert_array_almost_equal(img.affine, hdr.get_best_affine()) @@ -915,22 +926,23 @@ def test_set_sform(self): img.set_qform(qform_affine, 1) img.set_sform(new_affine, 1) saff, code = img.get_sform(coded=True) - assert_equal(code, 1) + assert code == 1 assert_array_almost_equal(saff, new_affine) assert_array_almost_equal(img.affine, new_affine) # zooms follow qform assert_array_almost_equal(hdr.get_zooms(), [1.2, 1.2, 1.2]) # Clear sform using None, best_affine should fall back on qform img.set_sform(None) - assert_equal(hdr['sform_code'], 0) -
assert_equal(hdr['qform_code'], 1) + assert hdr['sform_code'] == 0 + assert hdr['qform_code'] == 1 # Sform holds previous affine from last set assert_array_almost_equal(hdr.get_sform(), saff) # Image affine follows qform assert_array_almost_equal(img.affine, qform_affine) assert_array_almost_equal(hdr.get_best_affine(), img.affine) # Unexpected keyword raises error - assert_raises(TypeError, img.get_sform, strange=True) + with pytest.raises(TypeError): + img.get_sform(strange=True) # updating None affine should also work img = self.image_class(np.zeros((2, 3, 4)), None) new_affine = np.eye(4) @@ -971,8 +983,8 @@ def test_load_save(self): data = np.arange(np.prod(shape), dtype=npt).reshape(shape) affine = np.diag([1, 2, 3, 1]) img = IC(data, affine) - assert_equal(img.header.get_data_offset(), 0) - assert_equal(img.shape, shape) + assert img.header.get_data_offset() == 0 + assert img.shape == shape img.set_data_dtype(npt) img2 = bytesio_round_trip(img) assert_array_equal(img2.get_fdata(), data) @@ -981,11 +993,11 @@ def test_load_save(self): fname = os.path.join(tmpdir, 'test' + img_ext + ext) img.to_filename(fname) img3 = IC.load(fname) - assert_true(isinstance(img3, img.__class__)) + assert isinstance(img3, img.__class__) assert_array_equal(img3.get_fdata(), data) - assert_equal(img3.header, img.header) - assert_true(isinstance(np.asanyarray(img3.dataobj), - np.memmap if ext == '' else np.ndarray)) + assert img3.header == img.header + assert isinstance(np.asanyarray(img3.dataobj), + np.memmap if ext == '' else np.ndarray) # del to avoid windows errors of form 'The process cannot # access the file because it is being used' del img3 @@ -1027,8 +1039,8 @@ def test_affines_init(self): # Default is sform set, qform not set img = IC(arr, aff) hdr = img.header - assert_equal(hdr['qform_code'], 0) - assert_equal(hdr['sform_code'], 2) + assert hdr['qform_code'] == 0 + assert hdr['sform_code'] == 2 assert_array_equal(hdr.get_zooms(), [2, 3, 4]) # This is also true for affines with header passed qaff = np.diag([3, 4, 5, 1]) @@ -1039,16 +1051,16 @@ def test_affines_init(self): img = IC(arr, aff, hdr) new_hdr = img.header # Again affine is sort of anonymous space - assert_equal(new_hdr['qform_code'], 0) - assert_equal(new_hdr['sform_code'], 2) + assert new_hdr['qform_code'] == 0 + assert new_hdr['sform_code'] == 2 assert_array_equal(new_hdr.get_sform(), aff) assert_array_equal(new_hdr.get_zooms(), [2, 3, 4]) # But if no affine passed, codes and matrices stay the same img = IC(arr, None, hdr) new_hdr = img.header - assert_equal(new_hdr['qform_code'], 1) # scanner + assert new_hdr['qform_code'] == 1 # scanner assert_array_equal(new_hdr.get_qform(), qaff) - assert_equal(new_hdr['sform_code'], 3) # Still talairach + assert new_hdr['sform_code'] == 3 # Still talairach assert_array_equal(new_hdr.get_sform(), saff) # Pixdims as in the original header assert_array_equal(new_hdr.get_zooms(), [3, 4, 5]) @@ -1057,13 +1069,13 @@ def test_read_no_extensions(self): IC = self.image_class arr = np.arange(24).reshape((2, 3, 4)) img = IC(arr, np.eye(4)) - assert_equal(len(img.header.extensions), 0) + assert len(img.header.extensions) == 0 img_rt = bytesio_round_trip(img) - assert_equal(len(img_rt.header.extensions), 0) + assert len(img_rt.header.extensions) == 0 # Check simple round trip with large offset img.header.set_data_offset(1024) img_rt = bytesio_round_trip(img) - assert_equal(len(img_rt.header.extensions), 0) + assert len(img_rt.header.extensions) == 0 def _get_raw_scaling(self, hdr): return hdr['scl_slope'], 
hdr['scl_inter'] @@ -1094,34 +1106,35 @@ def test_offset_errors(self): IC = self.image_class arr = np.arange(24).reshape((2, 3, 4)) img = IC(arr, np.eye(4)) - assert_equal(img.header.get_data_offset(), 0) + assert img.header.get_data_offset() == 0 # Saving with zero offset is OK img_rt = bytesio_round_trip(img) - assert_equal(img_rt.header.get_data_offset(), 0) + assert img_rt.header.get_data_offset() == 0 # Saving with too low offset explicitly set gives error fm = bytesio_filemap(IC) img.header.set_data_offset(16) - assert_raises(HeaderDataError, img.to_file_map, fm) + with pytest.raises(HeaderDataError): + img.to_file_map(fm) def test_extension_basics(): raw = '123' ext = Nifti1Extension('comment', raw) - assert_true(ext.get_sizeondisk() == 16) - assert_true(ext.get_content() == raw) - assert_true(ext.get_code() == 6) + assert ext.get_sizeondisk() == 16 + assert ext.get_content() == raw + assert ext.get_code() == 6 # Test that extensions already aligned to 16 bytes are not padded ext = Nifti1Extension('comment', b'x' * 24) - assert_true(ext.get_sizeondisk() == 32) + assert ext.get_sizeondisk() == 32 def test_ext_eq(): ext = Nifti1Extension('comment', '123') - assert_true(ext == ext) - assert_false(ext != ext) + assert ext == ext + assert not ext != ext ext2 = Nifti1Extension('comment', '124') - assert_false(ext == ext2) - assert_true(ext != ext2) + assert not ext == ext2 + assert ext != ext2 def test_extension_codes(): @@ -1132,12 +1145,12 @@ def test_extension_codes(): def test_extension_list(): ext_c0 = Nifti1Extensions() ext_c1 = Nifti1Extensions() - assert_equal(ext_c0, ext_c1) + assert ext_c0 == ext_c1 ext = Nifti1Extension('comment', '123') ext_c1.append(ext) - assert_false(ext_c0 == ext_c1) + assert not ext_c0 == ext_c1 ext_c0.append(ext) - assert_true(ext_c0 == ext_c1) + assert ext_c0 == ext_c1 def test_extension_io(): @@ -1146,23 +1159,23 @@ def test_extension_io(): ext1.write_to(bio, False) bio.seek(0) ebacks = Nifti1Extensions.from_fileobj(bio, -1, False) - assert_equal(len(ebacks), 1) - assert_equal(ext1, ebacks[0]) + assert len(ebacks) == 1 + assert ext1 == ebacks[0] # Check the start is what we expect exp_dtype = np.dtype([('esize', 'i4'), ('ecode', 'i4')]) bio.seek(0) buff = np.ndarray(shape=(), dtype=exp_dtype, buffer=bio.read(16)) - assert_equal(buff['esize'], 32) - assert_equal(buff['ecode'], 6) + assert buff['esize'] == 32 + assert buff['ecode'] == 6 # Try another extension on top bio.seek(32) ext2 = Nifti1Extension(6, b'Comment') ext2.write_to(bio, False) bio.seek(0) ebacks = Nifti1Extensions.from_fileobj(bio, -1, False) - assert_equal(len(ebacks), 2) - assert_equal(ext1, ebacks[0]) - assert_equal(ext2, ebacks[1]) + assert len(ebacks) == 2 + assert ext1 == ebacks[0] + assert ext2 == ebacks[1] # Rewrite but deliberately setting esize wrongly bio.truncate(0) bio.seek(0) @@ -1178,11 +1191,11 @@ def test_extension_io(): bio.seek(0) with warnings.catch_warnings(record=True) as warns: ebacks = Nifti1Extensions.from_fileobj(bio, -1, False) - assert_equal(len(warns), 1) - assert_equal(warns[0].category, UserWarning) - assert_equal(len(ebacks), 2) - assert_equal(ext1, ebacks[0]) - assert_equal(ext2, ebacks[1]) + assert len(warns) == 1 + assert warns[0].category == UserWarning + assert len(ebacks) == 2 + assert ext1 == ebacks[0] + assert ext2 == ebacks[1] def test_nifti_extensions(): @@ -1190,25 +1203,25 @@ def test_nifti_extensions(): # basic checks of the available extensions hdr = nim.header exts_container = hdr.extensions - assert_equal(len(exts_container), 2) - 
assert_equal(exts_container.count('comment'), 2) - assert_equal(exts_container.count('afni'), 0) - assert_equal(exts_container.get_codes(), [6, 6]) - assert_equal((exts_container.get_sizeondisk()) % 16, 0) + assert len(exts_container) == 2 + assert exts_container.count('comment') == 2 + assert exts_container.count('afni') == 0 + assert exts_container.get_codes() == [6, 6] + assert (exts_container.get_sizeondisk()) % 16 == 0 # first extension should be short one - assert_equal(exts_container[0].get_content(), b'extcomment1') + assert exts_container[0].get_content() == b'extcomment1' # add one afniext = Nifti1Extension('afni', '') exts_container.append(afniext) - assert_true(exts_container.get_codes() == [6, 6, 4]) - assert_true(exts_container.count('comment') == 2) - assert_true(exts_container.count('afni') == 1) - assert_true((exts_container.get_sizeondisk()) % 16 == 0) + assert exts_container.get_codes() == [6, 6, 4] + assert exts_container.count('comment') == 2 + assert exts_container.count('afni') == 1 + assert (exts_container.get_sizeondisk()) % 16 == 0 # delete one del exts_container[1] - assert_true(exts_container.get_codes() == [6, 4]) - assert_true(exts_container.count('comment') == 1) - assert_true(exts_container.count('afni') == 1) + assert exts_container.get_codes() == [6, 4] + assert exts_container.count('comment') == 1 + assert exts_container.count('afni') == 1 @dicom_test @@ -1219,47 +1232,47 @@ def test_nifti_dicom_extension(): # create an empty dataset if no content provided (to write a new header) dcmext = Nifti1DicomExtension(2, b'') - assert_equal(dcmext.get_content().__class__, pydicom.dataset.Dataset) - assert_equal(len(dcmext.get_content().values()), 0) + assert dcmext.get_content().__class__ == pydicom.dataset.Dataset + assert len(dcmext.get_content().values()) == 0 # create an empty dataset if no content provided (to write a new header) dcmext = Nifti1DicomExtension(2, None) - assert_equal(dcmext.get_content().__class__, pydicom.dataset.Dataset) - assert_equal(len(dcmext.get_content().values()), 0) + assert dcmext.get_content().__class__ == pydicom.dataset.Dataset + assert len(dcmext.get_content().values()) == 0 # use a dataset if provided ds = pydicom.dataset.Dataset() ds.add_new((0x10, 0x20), 'LO', 'NiPy') dcmext = Nifti1DicomExtension(2, ds) - assert_equal(dcmext.get_content().__class__, pydicom.dataset.Dataset) - assert_equal(len(dcmext.get_content().values()), 1) - assert_equal(dcmext.get_content().PatientID, 'NiPy') + assert dcmext.get_content().__class__ == pydicom.dataset.Dataset + assert len(dcmext.get_content().values()) == 1 + assert dcmext.get_content().PatientID == 'NiPy' # create a single dicom tag (Patient ID, [0010,0020]) with Explicit VR / LE dcmbytes_explicit = struct.pack('2H2sH4s', 0x10, 0x20, @@ -1267,30 +1280,31 @@ def test_nifti_dicom_extension(): 'NiPy'.encode('utf-8')) hdr_be = Nifti1Header(endianness='>') # Big Endian Nifti1Header dcmext = Nifti1DicomExtension(2, dcmbytes_explicit_be, parent_hdr=hdr_be) - assert_equal(dcmext.__class__, Nifti1DicomExtension) - assert_equal(dcmext._guess_implicit_VR(), False) - assert_equal(dcmext.get_code(), 2) - assert_equal(dcmext.get_content().PatientID, 'NiPy') - assert_equal(dcmext.get_content()[0x10, 0x20].value, 'NiPy') - assert_equal(len(dcmext.get_content().values()), 1) - assert_equal(dcmext._mangle(dcmext.get_content()), dcmbytes_explicit_be) - assert_equal(dcmext.get_sizeondisk() % 16, 0) + assert dcmext.__class__ == Nifti1DicomExtension + assert dcmext._guess_implicit_VR() == False + assert 
dcmext.get_code() == 2 + assert dcmext.get_content().PatientID == 'NiPy' + assert dcmext.get_content()[0x10, 0x20].value == 'NiPy' + assert len(dcmext.get_content().values()) == 1 + assert dcmext._mangle(dcmext.get_content()) == dcmbytes_explicit_be + assert dcmext.get_sizeondisk() % 16 == 0 # Check that a dicom dataset is written w/ BE encoding when not created # using BE bytestring when given a BE nifti header dcmext = Nifti1DicomExtension(2, ds, parent_hdr=hdr_be) - assert_equal(dcmext._mangle(dcmext.get_content()), dcmbytes_explicit_be) + assert dcmext._mangle(dcmext.get_content()) == dcmbytes_explicit_be # dicom extension access from nifti extensions - assert_equal(exts_container.count('dicom'), 0) + assert exts_container.count('dicom') == 0 exts_container.append(dcmext) - assert_equal(exts_container.count('dicom'), 1) - assert_equal(exts_container.get_codes(), [6, 6, 2]) - assert_equal(dcmext._mangle(dcmext.get_content()), dcmbytes_explicit_be) - assert_equal(dcmext.get_sizeondisk() % 16, 0) + assert exts_container.count('dicom') == 1 + assert exts_container.get_codes() == [6, 6, 2] + assert dcmext._mangle(dcmext.get_content()) == dcmbytes_explicit_be + assert dcmext.get_sizeondisk() % 16 == 0 # creating an extension with bad content should raise - assert_raises(TypeError, Nifti1DicomExtension, 2, 0) + with pytest.raises(TypeError): + Nifti1DicomExtension(2, 0) class TestNifti1General(object): @@ -1309,38 +1323,38 @@ def test_loadsave_cycle(self): # ensure we have extensions hdr = nim.header exts_container = hdr.extensions - assert_true(len(exts_container) > 0) + assert len(exts_container) > 0 # write into the air ;-) lnim = bytesio_round_trip(nim) hdr = lnim.header lexts_container = hdr.extensions - assert_equal(exts_container, lexts_container) + assert exts_container == lexts_container # build int16 image data = np.ones((2, 3, 4, 5), dtype='int16') img = self.single_class(data, np.eye(4)) hdr = img.header - assert_equal(hdr.get_data_dtype(), np.int16) + assert hdr.get_data_dtype() == np.int16 # default should have no scaling assert_array_equal(hdr.get_slope_inter(), (None, None)) # set scaling hdr.set_slope_inter(2, 8) - assert_equal(hdr.get_slope_inter(), (2, 8)) + assert hdr.get_slope_inter() == (2, 8) # now build new image with updated header wnim = self.single_class(data, np.eye(4), header=hdr) - assert_equal(wnim.get_data_dtype(), np.int16) + assert wnim.get_data_dtype() == np.int16 # Header scaling reset to default by image creation - assert_equal(wnim.header.get_slope_inter(), (None, None)) + assert wnim.header.get_slope_inter() == (None, None) # But we can reset it again after image creation wnim.header.set_slope_inter(2, 8) - assert_equal(wnim.header.get_slope_inter(), (2, 8)) + assert wnim.header.get_slope_inter() == (2, 8) # write into the air again ;-) lnim = bytesio_round_trip(wnim) - assert_equal(lnim.get_data_dtype(), np.int16) + assert lnim.get_data_dtype() == np.int16 # Scaling applied assert_array_equal(lnim.get_fdata(), data * 2. + 8.) # slope, inter reset by image creation, but saved in proxy - assert_equal(lnim.header.get_slope_inter(), (None, None)) - assert_equal((lnim.dataobj.slope, lnim.dataobj.inter), (2, 8)) + assert lnim.header.get_slope_inter() == (None, None) + assert (lnim.dataobj.slope, lnim.dataobj.inter) == (2, 8) def test_load(self): # test module level load. 
We try to load a nii and an .img and a .hdr @@ -1371,7 +1385,7 @@ def test_float_int_min_max(self): img = self.single_class(arr, aff) img_back = bytesio_round_trip(img) arr_back_sc = img_back.get_fdata() - assert_true(np.allclose(arr, arr_back_sc)) + assert np.allclose(arr, arr_back_sc) def test_float_int_spread(self): # Test rounding error for spread of values @@ -1392,7 +1406,7 @@ def test_float_int_spread(self): # Simulate allclose test with large atol diff = np.abs(arr_t - arr_back_sc) rdiff = diff / np.abs(arr_t) - assert_true(np.all((diff <= max_miss) | (rdiff <= 1e-5))) + assert np.all((diff <= max_miss) | (rdiff <= 1e-5)) def test_rt_bias(self): # Check for bias in round trip @@ -1415,7 +1429,7 @@ def test_rt_bias(self): inter) # Hokey use of max_miss as a std estimate bias_thresh = np.max([max_miss / np.sqrt(count), eps]) - assert_true(np.abs(bias) < bias_thresh) + assert np.abs(bias) < bias_thresh def test_reoriented_dim_info(self): # Check that dim_info is reoriented correctly @@ -1445,7 +1459,7 @@ def test_reoriented_dim_info(self): new_fdir = dirs[new_freq] if new_freq is not None else None new_pdir = dirs[new_phas] if new_phas is not None else None new_sdir = dirs[new_slic] if new_slic is not None else None - assert_equal((new_fdir, new_pdir, new_sdir), (fdir, pdir, sdir)) + assert (new_fdir, new_pdir, new_sdir) == (fdir, pdir, sdir) @runif_extra_has('slow') @@ -1459,6 +1473,6 @@ def test_large_nifti1(): del img data = load('test.nii.gz').get_fdata() # Check that the data are all ones - assert_equal(image_shape, data.shape) + assert image_shape == data.shape n_ones = np.sum((data == 1.)) - assert_equal(np.prod(image_shape), n_ones) + assert np.prod(image_shape) == n_ones diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index 730e30a689..7f9d32b6b9 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -19,10 +19,9 @@ from .test_nifti1 import (TestNifti1PairHeader, TestNifti1SingleHeader, TestNifti1Pair, TestNifti1Image, TestNifti1General) -from nose.tools import assert_equal from numpy.testing import assert_array_equal -from ..testing import data_path +from ..testing_pytest import data_path header_file = os.path.join(data_path, 'nifti2.hdr') image_file = os.path.join(data_path, 'example_nifti2.nii.gz') @@ -50,13 +49,13 @@ def test_eol_check(self): hdr['eol_check'] = 0 fhdr, message, raiser = self.log_chk(hdr, 20) assert_array_equal(fhdr['eol_check'], good_eol) - assert_equal(message, + assert (message == 'EOL check all 0; ' 'setting EOL check to 13, 10, 26, 10') hdr['eol_check'] = (13, 10, 0, 10) fhdr, message, raiser = self.log_chk(hdr, 40) assert_array_equal(fhdr['eol_check'], good_eol) - assert_equal(message, + assert (message == 'EOL check not 0 or 13, 10, 26, 10; ' 'data may be corrupted by EOL conversion; ' 'setting EOL check to 13, 10, 26, 10') @@ -110,6 +109,6 @@ def test_nifti12_conversion(): in_hdr.set_data_dtype(dtype_type) in_hdr.extensions[:] = [ext1, ext2] out_hdr = out_type.from_header(in_hdr) - assert_equal(out_hdr.get_data_shape(), shape) - assert_equal(out_hdr.get_data_dtype(), dtype_type) - assert_equal(in_hdr.extensions, out_hdr.extensions) + assert out_hdr.get_data_shape() == shape + assert out_hdr.get_data_dtype() == dtype_type + assert in_hdr.extensions == out_hdr.extensions diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index 6d1baab734..eac73dd92b 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -19,9 +19,8 @@ from ..volumeutils import 
BinOpener from unittest import mock -from nose.tools import (assert_true, assert_false, assert_equal, - assert_not_equal, assert_raises) -from ..testing import error_warnings +import pytest +from ..testing_pytest import error_warnings class Lunk(object): @@ -41,24 +40,24 @@ def read(self): def test_Opener(): # Test default mode is 'rb' fobj = Opener(__file__) - assert_equal(fobj.mode, 'rb') + assert fobj.mode == 'rb' fobj.close() # That it's a context manager with Opener(__file__) as fobj: - assert_equal(fobj.mode, 'rb') + assert fobj.mode == 'rb' # That we can set the mode with Opener(__file__, 'r') as fobj: - assert_equal(fobj.mode, 'r') + assert fobj.mode == 'r' # with keyword arguments with Opener(__file__, mode='r') as fobj: - assert_equal(fobj.mode, 'r') + assert fobj.mode == 'r' # fileobj returns fileobj passed through message = b"Wine? Wouldn't you?" for obj in (BytesIO(message), Lunk(message)): with Opener(obj) as fobj: - assert_equal(fobj.read(), message) + assert fobj.read() == message # Which does not close the object - assert_false(obj.closed) + assert not obj.closed # mode is gently ignored fobj = Opener(obj, mode='r') @@ -77,31 +76,34 @@ def test_Opener_various(): sobj): with Opener(input, 'wb') as fobj: fobj.write(message) - assert_equal(fobj.tell(), len(message)) + assert fobj.tell() == len(message) if input == sobj: input.seek(0) with Opener(input, 'rb') as fobj: message_back = fobj.read() - assert_equal(message, message_back) + assert message == message_back if input == sobj: # Fileno is unsupported for BytesIO - assert_raises(UnsupportedOperation, fobj.fileno) + with pytest.raises(UnsupportedOperation): + fobj.fileno() elif input.endswith('.bz2') and not bz2_fileno: - assert_raises(AttributeError, fobj.fileno) + with pytest.raises(AttributeError): + fobj.fileno() # indexed gzip is used by default, and drops file # handles by default, so we don't have a fileno. 
elif input.endswith('gz') and HAVE_INDEXED_GZIP and \ StrictVersion(igzip.__version__) >= StrictVersion('0.7.0'): - assert_raises(igzip.NoHandleError, fobj.fileno) + with pytest.raises(igzip.NoHandleError): + fobj.fileno() else: # Just check there is a fileno - assert_not_equal(fobj.fileno(), 0) + assert fobj.fileno() != 0 def test_BinOpener(): with error_warnings(): - assert_raises(DeprecationWarning, - BinOpener, 'test.txt', 'r') + with pytest.raises(DeprecationWarning): + BinOpener('test.txt', 'r') class MockIndexedGzipFile(GzipFile): @@ -158,22 +160,26 @@ def test_Opener_gzip_type(): with patch_indexed_gzip(igzip_present): assert isinstance(Opener(fname, **kwargs).fobj, expected) +@pytest.fixture(scope="class") +def image_opener_setup(request): + compress_ext_map = ImageOpener.compress_ext_map.copy() + request.cls.compress_ext_map = compress_ext_map -class TestImageOpener: + def teardown(): + ImageOpener.compress_ext_map = request.cls.compress_ext_map + request.addfinalizer(teardown) - def setUp(self): - self.compress_ext_map = ImageOpener.compress_ext_map.copy() - def teardown(self): - ImageOpener.compress_ext_map = self.compress_ext_map +@pytest.mark.usefixtures("image_opener_setup") +class TestImageOpener: def test_vanilla(self): # Test that ImageOpener does add '.mgz' as gzipped file type with InTemporaryDirectory(): with ImageOpener('test.gz', 'w') as fobj: - assert_true(hasattr(fobj.fobj, 'compress')) + assert hasattr(fobj.fobj, 'compress') with ImageOpener('test.mgz', 'w') as fobj: - assert_true(hasattr(fobj.fobj, 'compress')) + assert hasattr(fobj.fobj, 'compress') def test_new_association(self): def file_opener(fileish, mode): @@ -182,16 +188,16 @@ def file_opener(fileish, mode): # Add the association n_associations = len(ImageOpener.compress_ext_map) ImageOpener.compress_ext_map['.foo'] = (file_opener, ('mode',)) - assert_equal(n_associations + 1, len(ImageOpener.compress_ext_map)) - assert_true('.foo' in ImageOpener.compress_ext_map) + assert n_associations + 1 == len(ImageOpener.compress_ext_map) + assert '.foo' in ImageOpener.compress_ext_map with InTemporaryDirectory(): with ImageOpener('test.foo', 'w'): pass - assert_true(os.path.exists('test.foo')) + assert os.path.exists('test.foo') # Check this doesn't add anything to parent - assert_false('.foo' in Opener.compress_ext_map) + assert not '.foo' in Opener.compress_ext_map def test_file_like_wrapper(): @@ -199,17 +205,17 @@ def test_file_like_wrapper(): message = b"History of the nude in" sobj = BytesIO() fobj = Opener(sobj) - assert_equal(fobj.tell(), 0) + assert fobj.tell() == 0 fobj.write(message) - assert_equal(fobj.tell(), len(message)) + assert fobj.tell() == len(message) fobj.seek(0) - assert_equal(fobj.tell(), 0) - assert_equal(fobj.read(6), message[:6]) - assert_false(fobj.closed) + assert fobj.tell() == 0 + assert fobj.read(6) == message[:6] + assert not fobj.closed fobj.close() - assert_true(fobj.closed) + assert fobj.closed # Added the fileobj name - assert_equal(fobj.name, None) + assert fobj.name == None def test_compressionlevel(): @@ -236,8 +242,8 @@ class MyOpener(Opener): with open(fname, 'rb') as fobj: my_selves_smaller = fobj.read() sizes[compresslevel] = len(my_selves_smaller) - assert_equal(sizes['default'], sizes[default_val]) - assert_true(sizes[1] > sizes[5]) + assert sizes['default'] == sizes[default_val] + assert sizes[1] > sizes[5] def test_compressed_ext_case(): @@ -256,23 +262,23 @@ class StrictOpener(Opener): with Opener(fname, 'wb') as fobj: fobj.write(contents) with Opener(fname, 'rb') as 
fobj: - assert_equal(fobj.read(), contents) + assert fobj.read() == contents os.unlink(fname) with StrictOpener(fname, 'wb') as fobj: fobj.write(contents) with StrictOpener(fname, 'rb') as fobj: - assert_equal(fobj.read(), contents) + assert fobj.read() == contents lext = ext.lower() if lext != ext: # extension should not be recognized -> file - assert_true(isinstance(fobj.fobj, file_class)) + assert isinstance(fobj.fobj, file_class) elif lext == 'gz': try: from ..openers import IndexedGzipFile except ImportError: IndexedGzipFile = GzipFile - assert_true(isinstance(fobj.fobj, (GzipFile, IndexedGzipFile))) + assert isinstance(fobj.fobj, (GzipFile, IndexedGzipFile)) else: - assert_true(isinstance(fobj.fobj, BZ2File)) + assert isinstance(fobj.fobj, BZ2File) def test_name(): @@ -287,22 +293,22 @@ def test_name(): lunk): exp_name = input if type(input) == type('') else None with Opener(input, 'wb') as fobj: - assert_equal(fobj.name, exp_name) + assert fobj.name == exp_name def test_set_extensions(): # Test that we can add extensions that are compressed with InTemporaryDirectory(): with Opener('test.gz', 'w') as fobj: - assert_true(hasattr(fobj.fobj, 'compress')) + assert hasattr(fobj.fobj, 'compress') with Opener('test.glrph', 'w') as fobj: - assert_false(hasattr(fobj.fobj, 'compress')) + assert not hasattr(fobj.fobj, 'compress') class MyOpener(Opener): compress_ext_map = Opener.compress_ext_map.copy() compress_ext_map['.glrph'] = Opener.gz_def with MyOpener('test.glrph', 'w') as fobj: - assert_true(hasattr(fobj.fobj, 'compress')) + assert hasattr(fobj.fobj, 'compress') def test_close_if_mine(): @@ -319,11 +325,11 @@ def test_close_if_mine(): # gzip objects have no 'closed' attribute has_closed = hasattr(fobj.fobj, 'closed') if has_closed: - assert_false(fobj.closed) + assert not fobj.closed fobj.close_if_mine() is_str = type(input) is type('') if has_closed: - assert_equal(fobj.closed, is_str) + assert fobj.closed == is_str def test_iter(): @@ -345,11 +351,12 @@ def test_iter(): fobj.write(asbytes(line + os.linesep)) with Opener(input, 'rb') as fobj: for back_line, line in zip(fobj, lines): - assert_equal(asstr(back_line).rstrip(), line) + assert asstr(back_line).rstrip() == line if not does_t: continue with Opener(input, 'rt') as fobj: for back_line, line in zip(fobj, lines): - assert_equal(back_line.rstrip(), line) + assert back_line.rstrip() == line lobj = Opener(Lunk('')) - assert_raises(TypeError, list, lobj) + with pytest.raises(TypeError): + list(lobj) diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 17c0816e70..5603fc5127 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -7,10 +7,9 @@ import builtins from distutils.version import LooseVersion +# TODO: remove (have to be coordinated with optpkg) from nose import SkipTest -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal) - +import pytest from nibabel.optpkg import optional_package from nibabel.tripwire import TripWire, TripWireError @@ -18,17 +17,20 @@ def assert_good(pkg_name, min_version=None): pkg, have_pkg, setup = optional_package(pkg_name, min_version=min_version) - assert_true(have_pkg) - assert_equal(sys.modules[pkg_name], pkg) - assert_equal(setup(), None) + assert have_pkg + assert sys.modules[pkg_name] == pkg + assert setup() == None def assert_bad(pkg_name, min_version=None): pkg, have_pkg, setup = optional_package(pkg_name, min_version=min_version) - assert_false(have_pkg) - assert_true(isinstance(pkg, TripWire)) - 
assert_raises(TripWireError, getattr, pkg, 'a_method') - assert_raises(SkipTest, setup) + assert not have_pkg + assert isinstance(pkg, TripWire) + with pytest.raises(TripWireError): + getattr(pkg, 'a_method') + # TODO: remove + with pytest.raises(SkipTest): + setup() def test_basic(): @@ -54,7 +56,7 @@ def raise_Exception(*args, **kwargs): def test_versions(): fake_name = '_a_fake_package' fake_pkg = types.ModuleType(fake_name) - assert_false('fake_pkg' in sys.modules) + assert not 'fake_pkg' in sys.modules # Not inserted yet assert_bad(fake_name) try: @@ -77,7 +79,7 @@ def test_versions(): try: pkg.some_method except TripWireError as err: - assert_equal(str(err), + assert (str(err) == 'These functions need _a_fake_package version >= 3.0') finally: del sys.modules[fake_name] diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 798f595fc7..5013828757 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -11,7 +11,7 @@ import numpy as np import warnings -from nose.tools import assert_true, assert_equal, assert_raises +import pytest from numpy.testing import assert_array_equal @@ -128,15 +128,12 @@ def test_apply(): # Test 4D with an example orientation ornt = OUT_ORNTS[-1] t_arr = apply_orientation(a[:, :, :, None], ornt) - assert_equal(t_arr.ndim, 4) + assert t_arr.ndim == 4 # Orientation errors - assert_raises(OrientationError, - apply_orientation, - a[:, :, 1], ornt) - assert_raises(OrientationError, - apply_orientation, - a, - [[0, 1], [np.nan, np.nan], [2, 1]]) + with pytest.raises(OrientationError): + apply_orientation(a[:, :, 1], ornt) + with pytest.raises(OrientationError): + apply_orientation(a, [[0, 1], [np.nan, np.nan], [2, 1]]) shape = np.array(a.shape) for ornt in ALL_ORNTS: t_arr = apply_orientation(a, ornt) @@ -171,7 +168,7 @@ def test_io_orientation(): ornt = io_orientation(in_arr) assert_array_equal(ornt, out_ornt) taff = inv_ornt_aff(ornt, shape) - assert_true(same_transform(taff, ornt, shape)) + assert same_transform(taff, ornt, shape) for axno in range(3): arr = in_arr.copy() ex_ornt = out_ornt.copy() @@ -182,7 +179,7 @@ def test_io_orientation(): ornt = io_orientation(arr) assert_array_equal(ornt, ex_ornt) taff = inv_ornt_aff(ornt, shape) - assert_true(same_transform(taff, ornt, shape)) + assert same_transform(taff, ornt, shape) # Test nasty hang for zero columns rzs = np.c_[np.diag([2, 3, 4, 5]), np.zeros((4, 3))] arr = from_matvec(rzs, [15, 16, 17, 18]) @@ -252,54 +249,52 @@ def test_ornt_transform(): [[1, -1], [2, 1], [0, 1]] ) # Must have same shape - assert_raises(ValueError, - ornt_transform, - [[0, 1], [1, 1]], - [[0, 1], [1, 1], [2, 1]]) + with pytest.raises(ValueError): + ornt_transform([[0, 1], [1, 1]], [[0, 1], [1, 1], [2, 1]]) # Must be (N,2) in shape - assert_raises(ValueError, - ornt_transform, - [[0, 1, 1], [1, 1, 1]], - [[0, 1, 1], [1, 1, 1]]) + with pytest.raises(ValueError): + ornt_transform([[0, 1, 1], [1, 1, 1]], + [[0, 1, 1], [1, 1, 1]]) # Target axes must exist in source - assert_raises(ValueError, - ornt_transform, - [[0, 1], [1, 1], [1, 1]], - [[0, 1], [1, 1], [2, 1]]) + with pytest.raises(ValueError): + ornt_transform([[0, 1], [1, 1], [1, 1]], + [[0, 1], [1, 1], [2, 1]]) def test_ornt2axcodes(): # Recoding orientation to axis codes labels = (('left', 'right'), ('back', 'front'), ('down', 'up')) - assert_equal(ornt2axcodes([[0, 1], + assert ornt2axcodes([[0, 1], [1, 1], - [2, 1]], labels), ('right', 'front', 'up')) - assert_equal(ornt2axcodes([[0, -1], + [2, 1]], labels) 
== ('right', 'front', 'up') + assert ornt2axcodes([[0, -1], [1, -1], - [2, -1]], labels), ('left', 'back', 'down')) - assert_equal(ornt2axcodes([[2, -1], + [2, -1]], labels) == ('left', 'back', 'down') + assert ornt2axcodes([[2, -1], [1, -1], - [0, -1]], labels), ('down', 'back', 'left')) - assert_equal(ornt2axcodes([[1, 1], + [0, -1]], labels) == ('down', 'back', 'left') + assert ornt2axcodes([[1, 1], [2, -1], - [0, 1]], labels), ('front', 'down', 'right')) + [0, 1]], labels) == ('front', 'down', 'right') # default is RAS output directions - assert_equal(ornt2axcodes([[0, 1], + assert ornt2axcodes([[0, 1], [1, 1], - [2, 1]]), ('R', 'A', 'S')) + [2, 1]]) == ('R', 'A', 'S') # dropped axes produce None - assert_equal(ornt2axcodes([[0, 1], + assert ornt2axcodes([[0, 1], [np.nan, np.nan], - [2, 1]]), ('R', None, 'S')) + [2, 1]]) == ('R', None, 'S') # Non integer axes raises error - assert_raises(ValueError, ornt2axcodes, [[0.1, 1]]) + with pytest.raises(ValueError): + ornt2axcodes([[0.1, 1]]) # As do directions not in range - assert_raises(ValueError, ornt2axcodes, [[0, 0]]) + with pytest.raises(ValueError): + ornt2axcodes([[0, 0]]) for axcodes, ornt in zip(ALL_AXCODES, ALL_ORNTS): - assert_equal(ornt2axcodes(ornt), axcodes) + assert ornt2axcodes(ornt) == axcodes def test_axcodes2ornt(): @@ -339,44 +334,48 @@ def test_axcodes2ornt(): # Missing axcodes raise an error assert_array_equal(axcodes2ornt('RAS'), default) - assert_raises(ValueError, axcodes2ornt, 'rAS') + with pytest.raises(ValueError): + axcodes2ornt('rAS') # None is OK as axis code assert_array_equal(axcodes2ornt(('R', None, 'S')), [[0, 1], [np.nan, np.nan], [2, 1]]) # Bad axis code with None also raises error. - assert_raises(ValueError, axcodes2ornt, ('R', None, 's')) + with pytest.raises(ValueError): + axcodes2ornt(('R', None, 's')) # Axis codes checked with custom labels labels = ('SD', 'BF', 'lh') assert_array_equal(axcodes2ornt('BlD', labels), [[1, -1], [2, -1], [0, 1]]) - assert_raises(ValueError, axcodes2ornt, 'blD', labels) + with pytest.raises(ValueError): + axcodes2ornt('blD', labels) # Duplicate labels - assert_raises(ValueError, axcodes2ornt, 'blD', ('SD', 'BF', 'lD')) - assert_raises(ValueError, axcodes2ornt, 'blD', ('SD', 'SF', 'lD')) + for labels in [('SD', 'BF', 'lD'),('SD', 'SF', 'lD')]: + with pytest.raises(ValueError): + axcodes2ornt('blD', labels) for axcodes, ornt in zip(ALL_AXCODES, ALL_ORNTS): assert_array_equal(axcodes2ornt(axcodes), ornt) def test_aff2axcodes(): - assert_equal(aff2axcodes(np.eye(4)), tuple('RAS')) + assert aff2axcodes(np.eye(4)) == tuple('RAS') aff = [[0, 1, 0, 10], [-1, 0, 0, 20], [0, 0, 1, 30], [0, 0, 0, 1]] - assert_equal(aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))), + assert (aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == ('B', 'R', 'U')) - assert_equal(aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))), + assert (aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == ('B', 'R', 'U')) def test_inv_ornt_aff(): # Extra tests for inv_ornt_aff routines (also tested in # io_orientations test) - assert_raises(OrientationError, inv_ornt_aff, - [[0, 1], [1, -1], [np.nan, np.nan]], (3, 4, 5)) + with pytest.raises(OrientationError): + inv_ornt_aff([[0, 1], [1, -1], [np.nan, np.nan]], (3, 4, 5)) def test_orientation_affine_deprecation(): @@ -384,6 +383,6 @@ def test_orientation_affine_deprecation(): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') aff2 = orientation_affine([[0, 1], [1, -1], [2, 1]], (3, 4, 5)) - 
assert_equal(len(warns), 1) - assert_equal(warns[0].category, DeprecationWarning) + assert len(warns) == 1 + assert warns[0].category == DeprecationWarning assert_array_equal(aff1, aff2) diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 940d8864e5..bb0888b0e7 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -20,10 +20,8 @@ from numpy.testing import (assert_almost_equal, assert_array_equal) -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal) - -from ..testing import (clear_and_catch_warnings, suppress_warnings, +import pytest +from ..testing_pytest import (clear_and_catch_warnings, suppress_warnings, assert_arr_dict_equal) from .test_arrayproxy import check_mmap @@ -177,15 +175,15 @@ def test_header(): v41_hdr = PARRECHeader.from_fileobj(fobj, strict_sort=strict_sort) for hdr in (v42_hdr, v41_hdr, v4_hdr): hdr = PARRECHeader(HDR_INFO, HDR_DEFS) - assert_equal(hdr.get_data_shape(), (64, 64, 9, 3)) - assert_equal(hdr.get_data_dtype(), np.dtype(' Date: Sun, 1 Dec 2019 11:24:34 -0500 Subject: [PATCH 455/689] converting more tests to pytest --- nibabel/tests/test_recoder.py | 155 +++++++++++++++----------- nibabel/tests/test_removalschedule.py | 10 +- nibabel/tests/test_round_trip.py | 4 +- nibabel/tests/test_rstutils.py | 59 +++++----- 4 files changed, 123 insertions(+), 105 deletions(-) diff --git a/nibabel/tests/test_recoder.py b/nibabel/tests/test_recoder.py index e340936ff0..28eba8860b 100644 --- a/nibabel/tests/test_recoder.py +++ b/nibabel/tests/test_recoder.py @@ -12,50 +12,75 @@ from ..volumeutils import Recoder, DtypeMapper, native_code, swapped_code -from nose.tools import assert_equal, assert_raises, assert_true, assert_false +import pytest -def test_recoder(): +def test_recoder_1(): # simplest case, no aliases codes = ((1,), (2,)) rc = Recoder(codes) - yield assert_equal, rc.code[1], 1 - yield assert_equal, rc.code[2], 2 - yield assert_raises, KeyError, rc.code.__getitem__, 3 + assert rc.code[1] == 1 + assert rc.code[2] == 2 + with pytest.raises(KeyError): + rc.code.__getitem__(3) + +def test_recoder_2(): # with explicit name for code + codes = ((1,), (2,)) rc = Recoder(codes, ['code1']) - yield assert_raises, AttributeError, rc.__getattribute__, 'code' - yield assert_equal, rc.code1[1], 1 - yield assert_equal, rc.code1[2], 2 + with pytest.raises(AttributeError): + rc.__getattribute__('code') + assert rc.code1[1] == 1 + assert rc.code1[2] == 2 + + +def test_recoder_3(): # code and label codes = ((1, 'one'), (2, 'two')) rc = Recoder(codes) # just with implicit alias - yield assert_equal, rc.code[1], 1 - yield assert_equal, rc.code[2], 2 - yield assert_raises, KeyError, rc.code.__getitem__, 3 - yield assert_equal, rc.code['one'], 1 - yield assert_equal, rc.code['two'], 2 - yield assert_raises, KeyError, rc.code.__getitem__, 'three' - yield assert_raises, AttributeError, rc.__getattribute__, 'label' - rc = Recoder(codes, ['code1', 'label']) # with explicit column names - yield assert_raises, AttributeError, rc.__getattribute__, 'code' - yield assert_equal, rc.code1[1], 1 - yield assert_equal, rc.code1['one'], 1 - yield assert_equal, rc.label[1], 'one' - yield assert_equal, rc.label['one'], 'one' + assert rc.code[1] == 1 + assert rc.code[2] == 2 + with pytest.raises(KeyError): + rc.code.__getitem__(3) + assert rc.code['one'] == 1 + assert rc.code['two'] == 2 + with pytest.raises(KeyError): + rc.code.__getitem__('three') + with pytest.raises(AttributeError): + rc.__getattribute__('label') + +def 
test_recoder_3_explicit(): + # with explicit column names + codes = ((1, 'one'), (2, 'two')) + rc = Recoder(codes, ['code1', 'label']) + with pytest.raises(AttributeError): + rc.__getattribute__('code') + assert rc.code1[1] == 1 + assert rc.code1['one'] == 1 + assert rc.label[1] == 'one' + assert rc.label['one'] == 'one' + + +def test_recoder_4(): # code, label, aliases codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes) # just with implicit alias - yield assert_equal, rc.code[1], 1 - yield assert_equal, rc.code['one'], 1 - yield assert_equal, rc.code['first'], 1 + assert rc.code[1] == 1 + assert rc.code['one'] == 1 + assert rc.code['first'] == 1 + + +def test_recoder_5(): + # with explicit column names + codes = ((1, 'one', '1', 'first'), (2, 'two')) + rc = Recoder(codes, ['code1', 'label']) - rc = Recoder(codes, ['code1', 'label']) # with explicit column names - yield assert_equal, rc.code1[1], 1 - yield assert_equal, rc.code1['first'], 1 - yield assert_equal, rc.label[1], 'one' - yield assert_equal, rc.label['first'], 'one' + assert rc.code1[1] == 1 + assert rc.code1['first'] == 1 + assert rc.label[1] == 'one' + assert rc.label['first'] == 'one' # Don't allow funny names - yield assert_raises, KeyError, Recoder, codes, ['field1'] + with pytest.raises(KeyError): + Recoder(codes, ['field1']) def test_custom_dicter(): @@ -81,22 +106,23 @@ def values(self): # code, label, aliases codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes, map_maker=MyDict) - yield assert_equal, rc.code[1], 'spam' - yield assert_equal, rc.code['one'], 'spam' - yield assert_equal, rc.code['first'], 'spam' - yield assert_equal, rc.code['bizarre'], 'eggs' - yield assert_equal, rc.value_set(), set(['funny', 'list']) - yield assert_equal, list(rc.keys()), ['some', 'keys'] + assert rc.code[1] == 'spam' + assert rc.code['one'] == 'spam' + assert rc.code['first'] == 'spam' + assert rc.code['bizarre'] == 'eggs' + assert rc.value_set() == set(['funny', 'list']) + assert list(rc.keys()) == ['some', 'keys'] def test_add_codes(): codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes) - yield assert_equal, rc.code['two'], 2 - yield assert_raises, KeyError, rc.code.__getitem__, 'three' + assert rc.code['two'] == 2 + with pytest.raises(KeyError): + rc.code.__getitem__('three') rc.add_codes(((3, 'three'), (1, 'number 1'))) - yield assert_equal, rc.code['three'], 3 - yield assert_equal, rc.code['number 1'], 1 + assert rc.code['three'] == 3 + assert rc.code['number 1'] == 1 def test_sugar(): @@ -104,31 +130,32 @@ def test_sugar(): codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes) # Field1 is synonym for first named dict - yield assert_equal, rc.code, rc.field1 + assert rc.code == rc.field1 rc = Recoder(codes, fields=('code1', 'label')) - yield assert_equal, rc.code1, rc.field1 + assert rc.code1 == rc.field1 # Direct key access identical to key access for first named - yield assert_equal, rc[1], rc.field1[1] - yield assert_equal, rc['two'], rc.field1['two'] + assert rc[1] == rc.field1[1] + assert rc['two'] == rc.field1['two'] # keys gets all keys - yield assert_equal, set(rc.keys()), set((1, 'one', '1', 'first', 2, 'two')) + assert set(rc.keys()) == set((1, 'one', '1', 'first', 2, 'two')) # value_set gets set of values from first column - yield assert_equal, rc.value_set(), set((1, 2)) + assert rc.value_set() == set((1, 2)) # or named column if given - yield assert_equal, rc.value_set('label'), set(('one', 'two')) + assert rc.value_set('label') == set(('one', 'two')) # "in" works for
values in and outside the set - yield assert_true, 'one' in rc - yield assert_false, 'three' in rc + assert 'one' in rc + assert 'three' not in rc def test_dtmapper(): # dict-like that will lookup on dtypes, even if they don't hash properly d = DtypeMapper() - assert_raises(KeyError, d.__getitem__, 1) + with pytest.raises(KeyError): + d.__getitem__(1) d[1] = 'something' - assert_equal(d[1], 'something') - assert_equal(list(d.keys()), [1]) - assert_equal(list(d.values()), ['something']) + assert d[1] == 'something' + assert list(d.keys()) == [1] + assert list(d.values()) == ['something'] intp_dt = np.dtype('intp') if intp_dt == np.dtype('int32'): canonical_dt = np.dtype('int32') @@ -139,21 +166,23 @@ def test_dtmapper(): native_dt = canonical_dt.newbyteorder('=') explicit_dt = canonical_dt.newbyteorder(native_code) d[canonical_dt] = 'spam' - assert_equal(d[canonical_dt], 'spam') - assert_equal(d[native_dt], 'spam') - assert_equal(d[explicit_dt], 'spam') + assert d[canonical_dt] == 'spam' + assert d[native_dt] == 'spam' + assert d[explicit_dt] == 'spam' + # Test keys, values d = DtypeMapper() - assert_equal(list(d.keys()), []) - assert_equal(list(d.keys()), []) + assert list(d.keys()) == [] + assert list(d.keys()) == [] d[canonical_dt] = 'spam' - assert_equal(list(d.keys()), [canonical_dt]) - assert_equal(list(d.values()), ['spam']) + assert list(d.keys()) == [canonical_dt] + assert list(d.values()) == ['spam'] # With other byte order d = DtypeMapper() sw_dt = canonical_dt.newbyteorder(swapped_code) d[sw_dt] = 'spam' - assert_raises(KeyError, d.__getitem__, canonical_dt) - assert_equal(d[sw_dt], 'spam') + with pytest.raises(KeyError): + d.__getitem__(canonical_dt) + assert d[sw_dt] == 'spam' sw_intp_dt = intp_dt.newbyteorder(swapped_code) - assert_equal(d[sw_intp_dt], 'spam') + assert d[sw_intp_dt] == 'spam' diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index a0c3484a3a..17f40d8395 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,5 +1,5 @@ from ..pkg_info import cmp_pkg_version -from ..testing import assert_raises, assert_false +import pytest MODULE_SCHEDULE = [ ('5.0.0', ['nibabel.keywordonly']), @@ -26,8 +26,9 @@ def test_module_removal(): for version, to_remove in MODULE_SCHEDULE: if cmp_pkg_version(version) < 1: for module in to_remove: - with assert_raises(ImportError, msg="Time to remove " + module): + with pytest.raises(ImportError): __import__(module) + pytest.fail("Time to remove " + module) def test_object_removal(): @@ -38,7 +39,7 @@ def test_object_removal(): module = __import__(module_name) except ImportError: continue - assert_false(hasattr(module, obj), msg="Time to remove %s.%s" % (module_name, obj)) + assert not hasattr(module, obj), "Time to remove %s.%s" % (module_name, obj) def test_attribute_removal(): @@ -53,5 +54,4 @@ def test_attribute_removal(): klass = getattr(module, cls) except AttributeError: continue - assert_false(hasattr(klass, attr), - msg="Time to remove %s.%s.%s" % (module_name, cls, attr)) + assert not hasattr(klass, attr), "Time to remove %s.%s.%s" % (module_name, cls, attr) diff --git a/nibabel/tests/test_round_trip.py b/nibabel/tests/test_round_trip.py index 5c3a12b086..79d785932d 100644 --- a/nibabel/tests/test_round_trip.py +++ b/nibabel/tests/test_round_trip.py @@ -11,8 +11,6 @@ from ..arraywriters import ScalingError from ..casting import best_float, ulp, type_info -from nose.tools import assert_true - from numpy.testing import assert_array_equal DEBUG = 
True @@ -193,4 +191,4 @@ def check_arr(test_id, V_in, in_type, out_type, scaling_type): slope, inter) # To help debugging failures with --pdb-failure np.nonzero(all_fails) - assert_true(this_test) + assert this_test diff --git a/nibabel/tests/test_rstutils.py b/nibabel/tests/test_rstutils.py index 51103c45ca..4fb83d3170 100644 --- a/nibabel/tests/test_rstutils.py +++ b/nibabel/tests/test_rstutils.py @@ -5,49 +5,46 @@ from ..rstutils import rst_table -from nose.tools import assert_equal, assert_raises +import pytest def test_rst_table(): # Tests for printable table function R, C = 3, 4 cell_values = np.arange(R * C).reshape((R, C)) - assert_equal(rst_table(cell_values), + assert (rst_table(cell_values) == """+--------+--------+--------+--------+--------+ | | col[0] | col[1] | col[2] | col[3] | +========+========+========+========+========+ | row[0] | 0.00 | 1.00 | 2.00 | 3.00 | | row[1] | 4.00 | 5.00 | 6.00 | 7.00 | | row[2] | 8.00 | 9.00 | 10.00 | 11.00 | -+--------+--------+--------+--------+--------+""" - ) - assert_equal(rst_table(cell_values, ['a', 'b', 'c']), ++--------+--------+--------+--------+--------+""") + assert (rst_table(cell_values, ['a', 'b', 'c']) == """+---+--------+--------+--------+--------+ | | col[0] | col[1] | col[2] | col[3] | +===+========+========+========+========+ | a | 0.00 | 1.00 | 2.00 | 3.00 | | b | 4.00 | 5.00 | 6.00 | 7.00 | | c | 8.00 | 9.00 | 10.00 | 11.00 | -+---+--------+--------+--------+--------+""" - ) - assert_raises(ValueError, - rst_table, cell_values, ['a', 'b']) - assert_raises(ValueError, - rst_table, cell_values, ['a', 'b', 'c', 'd']) - assert_equal(rst_table(cell_values, None, ['1', '2', '3', '4']), ++---+--------+--------+--------+--------+""") + with pytest.raises(ValueError): + rst_table(cell_values, ['a', 'b']) + with pytest.raises(ValueError): + rst_table(cell_values, ['a', 'b', 'c', 'd']) + assert (rst_table(cell_values, None, ['1', '2', '3', '4']) == """+--------+-------+-------+-------+-------+ | | 1 | 2 | 3 | 4 | +========+=======+=======+=======+=======+ | row[0] | 0.00 | 1.00 | 2.00 | 3.00 | | row[1] | 4.00 | 5.00 | 6.00 | 7.00 | | row[2] | 8.00 | 9.00 | 10.00 | 11.00 | -+--------+-------+-------+-------+-------+""" - ) - assert_raises(ValueError, - rst_table, cell_values, None, ['1', '2', '3']) - assert_raises(ValueError, - rst_table, cell_values, None, list('12345')) - assert_equal(rst_table(cell_values, title='A title'), ++--------+-------+-------+-------+-------+""") + with pytest.raises(ValueError): + rst_table(cell_values, None, ['1', '2', '3']) + with pytest.raises(ValueError): + rst_table(cell_values, None, list('12345')) + assert (rst_table(cell_values, title='A title') == """******* A title ******* @@ -58,29 +55,26 @@ def test_rst_table(): | row[0] | 0.00 | 1.00 | 2.00 | 3.00 | | row[1] | 4.00 | 5.00 | 6.00 | 7.00 | | row[2] | 8.00 | 9.00 | 10.00 | 11.00 | -+--------+--------+--------+--------+--------+""" - ) - assert_equal(rst_table(cell_values, val_fmt='{0}'), ++--------+--------+--------+--------+--------+""") + assert (rst_table(cell_values, val_fmt='{0}') == """+--------+--------+--------+--------+--------+ | | col[0] | col[1] | col[2] | col[3] | +========+========+========+========+========+ | row[0] | 0 | 1 | 2 | 3 | | row[1] | 4 | 5 | 6 | 7 | | row[2] | 8 | 9 | 10 | 11 | -+--------+--------+--------+--------+--------+""" - ) ++--------+--------+--------+--------+--------+""") # Doing a fancy cell format cell_values_back = np.arange(R * C)[::-1].reshape((R, C)) cell_3d = np.dstack((cell_values, cell_values_back)) 
- assert_equal(rst_table(cell_3d, val_fmt='{0[0]}-{0[1]}'), + assert (rst_table(cell_3d, val_fmt='{0[0]}-{0[1]}') == """+--------+--------+--------+--------+--------+ | | col[0] | col[1] | col[2] | col[3] | +========+========+========+========+========+ | row[0] | 0-11 | 1-10 | 2-9 | 3-8 | | row[1] | 4-7 | 5-6 | 6-5 | 7-4 | | row[2] | 8-3 | 9-2 | 10-1 | 11-0 | -+--------+--------+--------+--------+--------+""" - ) ++--------+--------+--------+--------+--------+""") # Test formatting characters formats = dict( down='!', @@ -88,7 +82,7 @@ def test_rst_table(): thick_long='~', cross='%', title_heading='#') - assert_equal(rst_table(cell_values, title='A title', format_chars=formats), + assert (rst_table(cell_values, title='A title', format_chars=formats) == """####### A title ####### @@ -99,10 +93,7 @@ def test_rst_table(): ! row[0] ! 0.00 ! 1.00 ! 2.00 ! 3.00 ! ! row[1] ! 4.00 ! 5.00 ! 6.00 ! 7.00 ! ! row[2] ! 8.00 ! 9.00 ! 10.00 ! 11.00 ! -%________%________%________%________%________%""" - ) +%________%________%________%________%________%""") formats['funny_value'] = '!' - assert_raises(ValueError, - rst_table, - cell_values, title='A title', format_chars=formats) - return + with pytest.raises(ValueError): + rst_table(cell_values, title='A title', format_chars=formats) From 152abea74e0767c54cb5b6b38713fe4c35b07322 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Sun, 1 Dec 2019 11:25:16 -0500 Subject: [PATCH 456/689] converting test to pytest, TODO: some tests are failing on osx --- nibabel/tests/test_scaling.py | 249 ++++++++++++++++++---------------- 1 file changed, 130 insertions(+), 119 deletions(-) diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index cd66bbfe3a..443fbfd729 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -13,97 +13,99 @@ from io import BytesIO from ..volumeutils import finite_range, apply_read_scaling, array_to_file, array_from_file from ..casting import type_info -from ..testing import suppress_warnings +from ..testing_pytest import suppress_warnings from .test_volumeutils import _calculate_scale from numpy.testing import (assert_array_almost_equal, assert_array_equal) - -from nose.tools import (assert_true, assert_equal, assert_raises, - assert_not_equal) +import pytest # Debug print statements DEBUG = True -def test_finite_range(): +@pytest.mark.parametrize("in_arr, res", [ + ([[-1, 0, 1], [np.inf, np.nan, -np.inf]], (-1, 1)), + (np.array([[-1, 0, 1], [np.inf, np.nan, -np.inf]]), (-1, 1)), + ([[np.nan], [np.nan]], (np.inf, -np.inf)), # all nans slices + (np.zeros((3, 4, 5)) + np.nan, (np.inf, -np.inf)), + ([[-np.inf], [np.inf]], (np.inf, -np.inf)), # all infs slices + (np.zeros((3, 4, 5)) + np.inf, (np.inf, -np.inf)), + ([[np.nan, -1, 2], [-2, np.nan, 1]], (-2, 2)), + ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), + ([[-np.inf, 2], [np.nan, 1]], (1, 2)), # good max case + ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), + ([np.nan], (np.inf, -np.inf)), + ([np.inf], (np.inf, -np.inf)), + ([-np.inf], (np.inf, -np.inf)), + ([np.inf, 1], (1, 1)), # only look at finite values + ([-np.inf, 1], (1, 1)), + ([[], []], (np.inf, -np.inf)), # empty array + (np.array([[-3, 0, 1], [2, -1, 4]], dtype=np.int), (-3, 4)), + (np.array([[1, 0, 1], [2, 3, 4]], dtype=np.uint), (0, 4)), + ([0., 1, 2, 3], (0, 3)), + # Complex comparison works as if they are floats + ([[np.nan, -1 - 100j, 2], [-2, np.nan, 1 + 100j]], (-2, 2)), + ([[np.nan, -1, 2 - 100j], [-2 + 100j, np.nan, 1]], (-2 + 100j, 2 - 100j)), +]) 
+def test_finite_range(in_arr, res): # Finite range utility function - for in_arr, res in ( - ([[-1, 0, 1], [np.inf, np.nan, -np.inf]], (-1, 1)), - (np.array([[-1, 0, 1], [np.inf, np.nan, -np.inf]]), (-1, 1)), - ([[np.nan], [np.nan]], (np.inf, -np.inf)), # all nans slices - (np.zeros((3, 4, 5)) + np.nan, (np.inf, -np.inf)), - ([[-np.inf], [np.inf]], (np.inf, -np.inf)), # all infs slices - (np.zeros((3, 4, 5)) + np.inf, (np.inf, -np.inf)), - ([[np.nan, -1, 2], [-2, np.nan, 1]], (-2, 2)), - ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), - ([[-np.inf, 2], [np.nan, 1]], (1, 2)), # good max case - ([[np.nan, -np.inf, 2], [-2, np.nan, np.inf]], (-2, 2)), - ([np.nan], (np.inf, -np.inf)), - ([np.inf], (np.inf, -np.inf)), - ([-np.inf], (np.inf, -np.inf)), - ([np.inf, 1], (1, 1)), # only look at finite values - ([-np.inf, 1], (1, 1)), - ([[], []], (np.inf, -np.inf)), # empty array - (np.array([[-3, 0, 1], [2, -1, 4]], dtype=np.int), (-3, 4)), - (np.array([[1, 0, 1], [2, 3, 4]], dtype=np.uint), (0, 4)), - ([0., 1, 2, 3], (0, 3)), - # Complex comparison works as if they are floats - ([[np.nan, -1 - 100j, 2], [-2, np.nan, 1 + 100j]], (-2, 2)), - ([[np.nan, -1, 2 - 100j], [-2 + 100j, np.nan, 1]], (-2 + 100j, 2 - 100j)), - ): - assert_equal(finite_range(in_arr), res) - assert_equal(finite_range(in_arr, False), res) - assert_equal(finite_range(in_arr, check_nan=False), res) - has_nan = np.any(np.isnan(in_arr)) - assert_equal(finite_range(in_arr, True), res + (has_nan,)) - assert_equal(finite_range(in_arr, check_nan=True), res + (has_nan,)) - in_arr = np.array(in_arr) - flat_arr = in_arr.ravel() - assert_equal(finite_range(flat_arr), res) - assert_equal(finite_range(flat_arr, True), res + (has_nan,)) - # Check float types work as complex - if in_arr.dtype.kind == 'f': - c_arr = in_arr.astype(np.complex) - assert_equal(finite_range(c_arr), res) - assert_equal(finite_range(c_arr, True), res + (has_nan,)) + assert finite_range(in_arr) == res + assert finite_range(in_arr, False) == res + assert finite_range(in_arr, check_nan=False) == res + has_nan = np.any(np.isnan(in_arr)) + assert finite_range(in_arr, True) == res + (has_nan,) + assert finite_range(in_arr, check_nan=True) == res + (has_nan,) + in_arr = np.array(in_arr) + flat_arr = in_arr.ravel() + assert finite_range(flat_arr) == res + assert finite_range(flat_arr, True) == res + (has_nan,) + # Check float types work as complex + if in_arr.dtype.kind == 'f': + c_arr = in_arr.astype(np.complex) + assert finite_range(c_arr) == res + assert finite_range(c_arr, True) == res + (has_nan,) + + +def test_finite_range_err(): # Test error cases a = np.array([[1., 0, 1], [2, 3, 4]]).view([('f1', 'f')]) - assert_raises(TypeError, finite_range, a) + with pytest.raises(TypeError): + finite_range(a) -def test_a2f_mn_mx(): +@pytest.mark.parametrize("out_type", [np.int16, np.float32]) +def test_a2f_mn_mx(out_type): # Test array to file mn, mx handling str_io = BytesIO() - for out_type in (np.int16, np.float32): - arr = np.arange(6, dtype=out_type) - arr_orig = arr.copy() # safe backup for testing against - # Basic round trip to warm up - array_to_file(arr, str_io) - data_back = array_from_file(arr.shape, out_type, str_io) - assert_array_equal(arr, data_back) - # Clip low - array_to_file(arr, str_io, mn=2) - data_back = array_from_file(arr.shape, out_type, str_io) - # arr unchanged - assert_array_equal(arr, arr_orig) - # returned value clipped low - assert_array_equal(data_back, [2, 2, 2, 3, 4, 5]) - # Clip high - array_to_file(arr, str_io, mx=4) - data_back = 
array_from_file(arr.shape, out_type, str_io) - # arr unchanged - assert_array_equal(arr, arr_orig) - # returned value clipped high - assert_array_equal(data_back, [0, 1, 2, 3, 4, 4]) - # Clip both - array_to_file(arr, str_io, mn=2, mx=4) - data_back = array_from_file(arr.shape, out_type, str_io) - # arr unchanged - assert_array_equal(arr, arr_orig) - # returned value clipped high - assert_array_equal(data_back, [2, 2, 2, 3, 4, 4]) + arr = np.arange(6, dtype=out_type) + arr_orig = arr.copy() # safe backup for testing against + # Basic round trip to warm up + array_to_file(arr, str_io) + data_back = array_from_file(arr.shape, out_type, str_io) + assert_array_equal(arr, data_back) + # Clip low + array_to_file(arr, str_io, mn=2) + data_back = array_from_file(arr.shape, out_type, str_io) + # arr unchanged + assert_array_equal(arr, arr_orig) + # returned value clipped low + assert_array_equal(data_back, [2, 2, 2, 3, 4, 5]) + # Clip high + array_to_file(arr, str_io, mx=4) + data_back = array_from_file(arr.shape, out_type, str_io) + # arr unchanged + assert_array_equal(arr, arr_orig) + # returned value clipped high + assert_array_equal(data_back, [0, 1, 2, 3, 4, 4]) + # Clip both + array_to_file(arr, str_io, mn=2, mx=4) + data_back = array_from_file(arr.shape, out_type, str_io) + # arr unchanged + assert_array_equal(arr, arr_orig) + # returned value clipped high + assert_array_equal(data_back, [2, 2, 2, 3, 4, 4]) def test_a2f_nan2zero(): @@ -129,54 +131,63 @@ def test_a2f_nan2zero(): assert_array_equal(data_back, [np.array(np.nan).astype(np.int32), 99]) -def test_array_file_scales(): +@pytest.mark.parametrize("in_type, out_type, err", [ + (np.int16, np.int16, None), + (np.int16, np.int8, None), + (np.uint16, np.uint8, None), + (np.int32, np.int8, None), + (np.float32, np.uint8, None), + (np.float32, np.int16, None) +]) +def test_array_file_scales(in_type, out_type, err): # Test scaling works for max, min when going from larger to smaller type, # and from float to integer. bio = BytesIO() - for in_type, out_type, err in ((np.int16, np.int16, None), - (np.int16, np.int8, None), - (np.uint16, np.uint8, None), - (np.int32, np.int8, None), - (np.float32, np.uint8, None), - (np.float32, np.int16, None)): - out_dtype = np.dtype(out_type) - arr = np.zeros((3,), dtype=in_type) - info = type_info(in_type) - arr[0], arr[1] = info['min'], info['max'] - if not err is None: - assert_raises(err, _calculate_scale, arr, out_dtype, True) - continue - slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True) - array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx) - bio.seek(0) - arr2 = array_from_file(arr.shape, out_dtype, bio) - arr3 = apply_read_scaling(arr2, slope, inter) - # Max rounding error for integer type - max_miss = slope / 2. - assert_true(np.all(np.abs(arr - arr3) <= max_miss)) - bio.truncate(0) - bio.seek(0) - - -def test_scaling_in_abstract(): + out_dtype = np.dtype(out_type) + arr = np.zeros((3,), dtype=in_type) + info = type_info(in_type) + arr[0], arr[1] = info['min'], info['max'] + if not err is None: + with pytest.raises(err): + _calculate_scale, arr, out_dtype, True) + return + slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True) + array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx) + bio.seek(0) + arr2 = array_from_file(arr.shape, out_dtype, bio) + arr3 = apply_read_scaling(arr2, slope, inter) + # Max rounding error for integer type + max_miss = slope / 2. 
+ assert np.all(np.abs(arr - arr3) <= max_miss) + bio.truncate(0) + bio.seek(0) + + +@pytest.mark.parametrize("category0, category1",[ + ('int', 'int'), + ('uint', 'int'), +]) +def test_scaling_in_abstract(category0, category1): # Confirm that, for all ints and uints as input, and all possible outputs, # for any simple way of doing the calculation, the result is near enough - for category0, category1 in (('int', 'int'), - ('uint', 'int'), - ): - for in_type in np.sctypes[category0]: - for out_type in np.sctypes[category1]: - check_int_a2f(in_type, out_type) + for in_type in np.sctypes[category0]: + for out_type in np.sctypes[category1]: + check_int_a2f(in_type, out_type) + + +@pytest.mark.parametrize("category0, category1", [ + ('float', 'int'), + ('float', 'uint'), + ('complex', 'int'), + ('complex', 'uint'), +]) +def test_scaling_in_abstract_warn(category0, category1): + # Converting floats to integer - for category0, category1 in (('float', 'int'), - ('float', 'uint'), - ('complex', 'int'), - ('complex', 'uint'), - ): - for in_type in np.sctypes[category0]: - for out_type in np.sctypes[category1]: - with suppress_warnings(): # overflow - check_int_a2f(in_type, out_type) + for in_type in np.sctypes[category0]: + for out_type in np.sctypes[category1]: + with suppress_warnings(): # overflow + check_int_a2f(in_type, out_type) def check_int_a2f(in_type, out_type): @@ -205,7 +216,7 @@ def check_int_a2f(in_type, out_type): array_to_file(data, str_io, out_type, 0, inter, scale, mn, mx) data_back = array_from_file(data.shape, out_type, str_io) data_back = apply_read_scaling(data_back, scale, inter) - assert_true(np.allclose(big_floater(data), big_floater(data_back))) + assert np.allclose(big_floater(data), big_floater(data_back)) # Try with analyze-size scale and inter scale32 = np.float32(scale) inter32 = np.float32(inter) @@ -216,5 +227,5 @@ def check_int_a2f(in_type, out_type): # Clip at extremes to remove inf info = type_info(in_type) out_min, out_max = info['min'], info['max'] - assert_true(np.allclose(big_floater(data), - big_floater(np.clip(data_back, out_min, out_max)))) + assert np.allclose(big_floater(data), + big_floater(np.clip(data_back, out_min, out_max))) From 2ebef2665ec561d21fa5aae0ac673fd301b73aeb Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Sun, 1 Dec 2019 13:06:03 -0500 Subject: [PATCH 457/689] adding pytestmark to test that has not been changed to pytest --- nibabel/tests/test_scripts.py | 1 + nibabel/tests/test_spaces.py | 2 +- nibabel/tests/test_spatialimages.py | 2 +- nibabel/tests/test_spm2analyze.py | 2 +- nibabel/tests/test_spm99analyze.py | 1 + nibabel/tests/test_testing.py | 2 +- nibabel/tests/test_tmpdirs.py | 1 + nibabel/tests/test_trackvis.py | 2 +- nibabel/tests/test_tripwire.py | 2 +- nibabel/tests/test_viewers.py | 1 + nibabel/tests/test_volumeutils.py | 1 + 11 files changed, 11 insertions(+), 6 deletions(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 431e606380..5e7defdaa5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -33,6 +33,7 @@ from .test_parrec_data import BALLS, AFF_OFF from ..testing_pytest import assert_data_similar +import pytest; pytestmark = pytest.mark.skip() def _proc_stdout(stdout): stdout_str = stdout.decode('latin1').strip() diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 2520d32225..119ecfd5c3 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -15,7 +15,7 @@ from nose.tools import (assert_true, 
assert_false, assert_raises, assert_equal, assert_not_equal) - +import pytest; pytestmark = pytest.mark.skip() def assert_all_in(in_shape, in_affine, out_shape, out_affine): slices = tuple(slice(N) for N in in_shape) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 3f9c8b4b97..5b4706bea1 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -30,7 +30,7 @@ from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError from .. import load as top_load - +import pytest; pytestmark = pytest.mark.skip() def test_header_init(): # test the basic header diff --git a/nibabel/tests/test_spm2analyze.py b/nibabel/tests/test_spm2analyze.py index e39e79b96e..ddf2956f7d 100644 --- a/nibabel/tests/test_spm2analyze.py +++ b/nibabel/tests/test_spm2analyze.py @@ -18,7 +18,7 @@ from ..testing import assert_equal, assert_raises from . import test_spm99analyze - +import pytest; pytestmark = pytest.mark.skip() class TestSpm2AnalyzeHeader(test_spm99analyze.TestSpm99AnalyzeHeader): header_class = Spm2AnalyzeHeader diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 71fe41e2ec..ecc63e5935 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -32,6 +32,7 @@ from ..testing_pytest import bytesio_round_trip, bytesio_filemap from . import test_analyze +import pytest; pytestmark = pytest.mark.skip() FLOAT_TYPES = np.sctypes['float'] COMPLEX_TYPES = np.sctypes['complex'] diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index e9f2d079ea..171447560e 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -11,7 +11,7 @@ from ..testing import (error_warnings, suppress_warnings, clear_and_catch_warnings, assert_allclose_safely, get_fresh_mod, assert_re_in, test_data, data_path) - +import pytest; pytestmark = pytest.mark.skip() def test_assert_allclose_safely(): # Test the safe version of allclose diff --git a/nibabel/tests/test_tmpdirs.py b/nibabel/tests/test_tmpdirs.py index 1d35b59269..7914e273e1 100644 --- a/nibabel/tests/test_tmpdirs.py +++ b/nibabel/tests/test_tmpdirs.py @@ -6,6 +6,7 @@ from ..tmpdirs import InGivenDirectory from nose.tools import assert_true, assert_equal +import pytest; pytestmark = pytest.mark.skip() MY_PATH = abspath(__file__) MY_DIR = dirname(MY_PATH) diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 076e22f74e..676ee52d87 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -12,7 +12,7 @@ from numpy.testing import assert_array_almost_equal from ..testing import (assert_true, assert_false, assert_equal, assert_raises, assert_warns, assert_array_equal, suppress_warnings) - +import pytest; pytestmark = pytest.mark.skip() def test_write(): streams = [] diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index 05d3b1eb3f..990f0bbf39 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -5,7 +5,7 @@ from nose.tools import (assert_true, assert_false, assert_raises, assert_equal, assert_not_equal) - +import pytest; pytestmark = pytest.mark.skip() def test_is_tripwire(): assert_false(is_tripwire(object())) diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 68710b3126..0e3f076223 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -18,6 +18,7 @@ from numpy.testing import assert_array_equal, 
assert_equal from nose.tools import assert_raises, assert_true +import pytest; pytestmark = pytest.mark.skip() # Need at least MPL 1.3 for viewer tests. matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.3') diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index d391abf359..711894aabc 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -61,6 +61,7 @@ from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, suppress_warnings, clear_and_catch_warnings) +import pytest; pytestmark = pytest.mark.skip() #: convenience variables for numpy types FLOAT_TYPES = np.sctypes['float'] From d68ba9d675cf03993246070c8677cd9ce146a1cc Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Sun, 1 Dec 2019 15:51:34 -0500 Subject: [PATCH 458/689] fixing test_wrapstruct: removing setUpClass and adding self.header_class check to all of the test methods: couldnt find better way for suppressing tests from abstract classes and allowing for testing from all child classes --- nibabel/tests/test_wrapstruct.py | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index b5ccdd2907..f627109fd0 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -118,13 +118,9 @@ def get_bad_bb(self): # means do not check return None - @classmethod - def setUpClass(cls): - if cls.header_class is None: - raise SkipTest("no testing methods from the abstract class") - - def test_general_init(self): + if not self.header_class: + pytest.skip() hdr = self.header_class() # binaryblock has length given by header data dtype binblock = hdr.binaryblock @@ -144,6 +140,8 @@ def _set_something_into_hdr(self, hdr): def test__eq__(self): # Test equal and not equal + if not self.header_class: + pytest.skip() hdr1 = self.header_class() hdr2 = self.header_class() assert hdr1 == hdr2 @@ -160,6 +158,8 @@ def test__eq__(self): def test_to_from_fileobj(self): # Successful write using write_to + if not self.header_class: + pytest.skip() hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) @@ -169,6 +169,8 @@ def test_to_from_fileobj(self): assert hdr2.binaryblock == hdr.binaryblock def test_mappingness(self): + if not self.header_class: + pytest.skip() hdr = self.header_class() with pytest.raises(ValueError): hdr.__setitem__('nonexistent key', 0.1) @@ -205,12 +207,16 @@ def test_endianness_ro(self): endianness on initialization (or occasionally byteswapping the data) - but this is done via via the as_byteswapped method ''' + if not self.header_class: + pytest.skip() hdr = self.header_class() with pytest.raises(AttributeError): hdr.__setattr__('endianness', '<') def test_endian_guess(self): # Check guesses of endian + if not self.header_class: + pytest.skip() eh = self.header_class() assert eh.endianness == native_code hdr_data = eh.structarr.copy() @@ -225,6 +231,8 @@ def test_binblock_is_file(self): # strings following. 
More generally, there may be other perhaps # optional data after the binary block, in which case you will need to # override this test + if not self.header_class: + pytest.skip() hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) @@ -232,6 +240,8 @@ def test_binblock_is_file(self): def test_structarr(self): # structarr attribute also read only + if not self.header_class: + pytest.skip() hdr = self.header_class() # Just check we can get structarr hdr.structarr @@ -250,6 +260,8 @@ def assert_no_log_err(self, hdr): def test_bytes(self): # Test get of bytes + if not self.header_class: + pytest.skip() hdr1 = self.header_class() bb = hdr1.binaryblock hdr2 = self.header_class(hdr1.binaryblock) @@ -280,6 +292,8 @@ def test_bytes(self): def test_as_byteswapped(self): # Check byte swapping + if not self.header_class: + pytest.skip() hdr = self.header_class() assert hdr.endianness == native_code # same code just returns a copy @@ -304,6 +318,8 @@ def check_fix(self, *args, **kwargs): def test_empty_check(self): # Empty header should be error free + if not self.header_class: + pytest.skip() hdr = self.header_class() hdr.check_fix(error_level=0) @@ -313,6 +329,8 @@ def _dxer(self, hdr): return self.header_class.diagnose_binaryblock(binblock) def test_str(self): + if not self.header_class: + pytest.skip() hdr = self.header_class() # Check something returns from str s1 = str(hdr) @@ -326,6 +344,8 @@ class _TestLabeledWrapStruct(_TestWrapStructBase): def test_get_value_label(self): # Test get value label method # Make a new class to avoid overwriting recoders of original + if not self.header_class: + pytest.skip() class MyHdr(self.header_class): _field_recoders = {} hdr = MyHdr() From 4f2aa9aef73edc0e44a8f601fb44f3bc9085fee4 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Sun, 1 Dec 2019 15:53:08 -0500 Subject: [PATCH 459/689] changing travis command after fixes in test_wrapstruct --- .travis.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index f4fff97719..ca6b4729dc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -130,11 +130,7 @@ script: cd for_testing cp ../.coveragerc . 
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests/test_a*.py ../nibabel/tests/test_b*.py ../nibabel/tests/test_c*.py ../nibabel/tests/test_d*.py - pytest -v../nibabel/tests/test_e*.py ../nibabel/tests/test_f*.py ../nibabel/tests/test_h*.py - pytest -v../nibabel/tests/test_i*.py ../nibabel/tests/test_k*.py ../nibabel/tests/test_l*.py ../nibabel/tests/test_m*.py - pytest -v../nibabel/tests/test_n*.py ../nibabel/tests/test_o*.py ../nibabel/tests/test_p*.py ../nibabel/tests/test_q*.py - pytest -v ../nibabel/tests/test_w*.py + pytest -v ../nibabel/tests else false fi From e211a477ee94d1ea2fd1b2e447b46fe11b7d36e2 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 14:29:08 -0500 Subject: [PATCH 460/689] MNT: Separate out pytest and nose jobs --- .azure-pipelines/windows.yml | 10 +++++++--- .travis.yml | 13 +++++++++++-- setup.cfg | 9 +++++---- 3 files changed, 23 insertions(+), 9 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index f825bef612..d96b40a714 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -29,17 +29,21 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% - python -m pip install nose mock coverage codecov pytest + python -m pip install nose coverage codecov pytest displayName: 'Install dependencies' - script: | - python -m pip install . + python -m pip install '.[$(CHECK_TYPE)]' SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | mkdir for_testing cd for_testing cp ../.coveragerc . - nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + if %CHECK_TYPE%=="nosetests" ( + nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + ) else ( + pytest --doctest-modules --cov nibabel -v --pyargs nibabel --deselect streamlines + ) displayName: 'Nose tests' - script: | cd for_testing diff --git a/.travis.yml b/.travis.yml index ca6b4729dc..aed8cf06a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,6 +28,10 @@ python: matrix: include: + # Old nosetests - Remove soon + - python: 3.7 + env: + - CHECK_TYPE="nosetests" # Basic dependencies only - python: 3.5 env: @@ -124,13 +128,18 @@ script: cd doc make html; make doctest; - elif [ "${CHECK_TYPE}" == "test" ]; then + elif [ "${CHECK_TYPE}" == "nosetests" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - pytest -v ../nibabel/tests + elif [ "${CHECK_TYPE}" == "test" ]; then + # Change into an innocuous directory and find tests from installation + mkdir for_testing + cd for_testing + cp ../.coveragerc . 
+ pytest --doctest-modules --cov nibabel -v --pyargs nibabel --deselect streamlines else false fi diff --git a/setup.cfg b/setup.cfg index d425dd2371..5719650bf8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -32,10 +32,6 @@ python_requires = >=3.5.1 install_requires = numpy >=1.13 packaging >=14.3 -tests_require = - nose >=0.11 - pytest -test_suite = nose.collector zip_safe = False packages = find: @@ -59,10 +55,15 @@ spm = scipy style = flake8 +nosetests = + coverage + nose >=0.11 + pytest test = coverage nose >=0.11 pytest + pytest-cov all = %(dicomfs)s %(dev)s From df6a28ec2015d00ed0fb3c0bd130aae9b8586b61 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 14:34:40 -0500 Subject: [PATCH 461/689] TEST: Fix tests broken in the rebase --- nibabel/tests/test_minc1.py | 2 +- nibabel/tests/test_processing.py | 5 +++-- nibabel/tests/test_scaling.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index 5b53d021c4..837957e566 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -112,7 +112,7 @@ def test_old_namespace(): MincImage(arr, aff) # Another old name from ..minc1 import MincFile, Minc1File - assert_false(MincFile is Minc1File) + assert MincFile is not Minc1File with pytest.raises(ExpiredDeprecationError): mf = MincFile(netcdf_file(EG_FNAME)) diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index 58c2573d63..dea34b85a2 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -172,7 +172,8 @@ def test_resample_from_to(): assert out.__class__ == Nifti1Image # From 2D to 3D, error, the fixed affine is not invertible img_2d = Nifti1Image(data[:, :, 0], affine) - assert_raises(AffineError, resample_from_to, img_2d, img) + with pytest.raises(AffineError): + resample_from_to(img_2d, img) # 3D to 2D, we don't need to invert the fixed matrix out = resample_from_to(img, img_2d) assert_array_equal(out.dataobj, data[:, :, 0]) @@ -186,7 +187,7 @@ def test_resample_from_to(): assert_almost_equal(data_4d, out.dataobj) assert_array_equal(img_4d.affine, out.affine) # Errors trying to match 3D to 4D - with pytest.rises(ValueError): + with pytest.raises(ValueError): resample_from_to(img_4d, img) with pytest.raises(ValueError): resample_from_to(img, img_4d) diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index 443fbfd729..ec335e5c24 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -149,7 +149,7 @@ def test_array_file_scales(in_type, out_type, err): arr[0], arr[1] = info['min'], info['max'] if not err is None: with pytest.raises(err): - _calculate_scale, arr, out_dtype, True) + _calculate_scale(arr, out_dtype, True) return slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True) array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx) From bfb63fcb37459e69bd5eb34baded1515af1d2b22 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 14:36:29 -0500 Subject: [PATCH 462/689] CI: Add CHECK_TYPE to Azure --- .azure-pipelines/windows.yml | 3 ++- azure-pipelines.yml | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index d96b40a714..f7b316932a 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -11,6 +11,7 @@ jobs: variables: EXTRA_WHEELS: "https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" DEPENDS: numpy scipy 
matplotlib h5py pydicom + CHECK_TYPE: test strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} @@ -32,7 +33,7 @@ jobs: python -m pip install nose coverage codecov pytest displayName: 'Install dependencies' - script: | - python -m pip install '.[$(CHECK_TYPE)]' + python -m pip install .[$(CHECK_TYPE)] SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - script: | diff --git a/azure-pipelines.yml b/azure-pipelines.yml index d09c5b7740..2ef2539c74 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,3 +34,7 @@ jobs: py38-x64: PYTHON_VERSION: '3.8' PYTHON_ARCH: 'x64' + nosetests: + PYTHON_VERSION: '3.6' + PYTHON_ARCH: 'x64' + CHECK_TYPE: 'nosetests' From 75298a43a28d4e745cb70c82181a7c5254985817 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 14:44:48 -0500 Subject: [PATCH 463/689] CI: Default to nose for Windows for now --- .azure-pipelines/windows.yml | 2 +- azure-pipelines.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index f7b316932a..363cd64eb4 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -11,7 +11,7 @@ jobs: variables: EXTRA_WHEELS: "https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" DEPENDS: numpy scipy matplotlib h5py pydicom - CHECK_TYPE: test + CHECK_TYPE: nosetests strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 2ef2539c74..5bbd37986c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,7 +34,7 @@ jobs: py38-x64: PYTHON_VERSION: '3.8' PYTHON_ARCH: 'x64' - nosetests: + pytest: PYTHON_VERSION: '3.6' PYTHON_ARCH: 'x64' - CHECK_TYPE: 'nosetests' + CHECK_TYPE: 'test' From 77402be4ba466bc2788135dad4f64e5f20f960d2 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 14:59:15 -0500 Subject: [PATCH 464/689] RF: Avoid running data script code on doctest --- nibabel/tests/data/gen_standard.py | 65 ++++++++++++++------------- nibabel/tests/data/make_moved_anat.py | 17 +++---- 2 files changed, 42 insertions(+), 40 deletions(-) diff --git a/nibabel/tests/data/gen_standard.py b/nibabel/tests/data/gen_standard.py index b97da8ff2f..f966b5599d 100644 --- a/nibabel/tests/data/gen_standard.py +++ b/nibabel/tests/data/gen_standard.py @@ -52,35 +52,36 @@ def _gen_straight_streamline(start, end, steps=3): return streamlines -rng = np.random.RandomState(42) - -width = 4 # Coronal -height = 5 # Sagittal -depth = 7 # Axial - -voxel_size = np.array((1., 3., 2.)) - -# Generate a random mask with voxel order RAS+. -mask = rng.rand(width, height, depth) > 0.8 -mask = (255*mask).astype(np.uint8) - -# Build tractogram -streamlines = mark_the_spot(mask) -tractogram = nib.streamlines.Tractogram(streamlines) - -# Build header -affine = np.eye(4) -affine[range(3), range(3)] = voxel_size -header = {Field.DIMENSIONS: (width, height, depth), - Field.VOXEL_SIZES: voxel_size, - Field.VOXEL_TO_RASMM: affine, - Field.VOXEL_ORDER: 'RAS'} - -# Save the standard mask. -nii = nib.Nifti1Image(mask, affine=affine) -nib.save(nii, "standard.nii.gz") - -# Save the standard tractogram in every available file format. 
-for ext, cls in FORMATS.items(): - tfile = cls(tractogram, header) - nib.streamlines.save(tfile, "standard" + ext) +if __name__ == '__main__': + rng = np.random.RandomState(42) + + width = 4 # Coronal + height = 5 # Sagittal + depth = 7 # Axial + + voxel_size = np.array((1., 3., 2.)) + + # Generate a random mask with voxel order RAS+. + mask = rng.rand(width, height, depth) > 0.8 + mask = (255*mask).astype(np.uint8) + + # Build tractogram + streamlines = mark_the_spot(mask) + tractogram = nib.streamlines.Tractogram(streamlines) + + # Build header + affine = np.eye(4) + affine[range(3), range(3)] = voxel_size + header = {Field.DIMENSIONS: (width, height, depth), + Field.VOXEL_SIZES: voxel_size, + Field.VOXEL_TO_RASMM: affine, + Field.VOXEL_ORDER: 'RAS'} + + # Save the standard mask. + nii = nib.Nifti1Image(mask, affine=affine) + nib.save(nii, "standard.nii.gz") + + # Save the standard tractogram in every available file format. + for ext, cls in FORMATS.items(): + tfile = cls(tractogram, header) + nib.streamlines.save(tfile, "standard" + ext) diff --git a/nibabel/tests/data/make_moved_anat.py b/nibabel/tests/data/make_moved_anat.py index 6fba2d0902..ec0817885c 100644 --- a/nibabel/tests/data/make_moved_anat.py +++ b/nibabel/tests/data/make_moved_anat.py @@ -12,11 +12,12 @@ from nibabel.eulerangles import euler2mat from nibabel.affines import from_matvec -img = nib.load('anatomical.nii') -some_rotations = euler2mat(0.1, 0.2, 0.3) -extra_affine = from_matvec(some_rotations, [3, 4, 5]) -moved_anat = nib.Nifti1Image(img.dataobj, - extra_affine.dot(img.affine), - img.header) -moved_anat.set_data_dtype(np.float32) -nib.save(moved_anat, 'anat_moved.nii') +if __name__ == '__main__': + img = nib.load('anatomical.nii') + some_rotations = euler2mat(0.1, 0.2, 0.3) + extra_affine = from_matvec(some_rotations, [3, 4, 5]) + moved_anat = nib.Nifti1Image(img.dataobj, + extra_affine.dot(img.affine), + img.header) + moved_anat.set_data_dtype(np.float32) + nib.save(moved_anat, 'anat_moved.nii') From 62132ad73b751c75274c8b2051e025175745bd8e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 15:00:18 -0500 Subject: [PATCH 465/689] TEST: Doctests not ready --- .azure-pipelines/windows.yml | 2 +- .travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 363cd64eb4..bbb6f907ad 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -43,7 +43,7 @@ jobs: if %CHECK_TYPE%=="nosetests" ( nosetests --with-doctest --with-coverage --cover-package nibabel nibabel ) else ( - pytest --doctest-modules --cov nibabel -v --pyargs nibabel --deselect streamlines + pytest --cov nibabel -v --pyargs nibabel --deselect streamlines ) displayName: 'Nose tests' - script: | diff --git a/.travis.yml b/.travis.yml index aed8cf06a3..cdcb578406 100644 --- a/.travis.yml +++ b/.travis.yml @@ -139,7 +139,7 @@ script: mkdir for_testing cd for_testing cp ../.coveragerc . 
- pytest --doctest-modules --cov nibabel -v --pyargs nibabel --deselect streamlines + pytest --cov nibabel -v --pyargs nibabel --deselect streamlines else false fi From 765bec8af1ceeff7afa2c44b9357118fbc81fcdf Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 15:36:15 -0500 Subject: [PATCH 466/689] CI: Try alternative CHECK_TYPE test --- .azure-pipelines/windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index bbb6f907ad..469fd27c96 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -40,7 +40,7 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . - if %CHECK_TYPE%=="nosetests" ( + if $(CHECK_TYPE)=="nosetests" ( nosetests --with-doctest --with-coverage --cover-package nibabel nibabel ) else ( pytest --cov nibabel -v --pyargs nibabel --deselect streamlines From f908ca4b5ed690a953c89af45656df558b733514 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 25 Jan 2020 15:37:04 -0500 Subject: [PATCH 467/689] PIN: Avoid pytest 5.3.4 (pytest-dev/pytest#6517) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 5719650bf8..fe6d91bdd3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -62,7 +62,7 @@ nosetests = test = coverage nose >=0.11 - pytest + pytest !=5.3.4 pytest-cov all = %(dicomfs)s From 60479c4879cd27095303ac320ad95c0f6b54b2af Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 09:33:59 -0500 Subject: [PATCH 468/689] TEST: Add BaseTestCase class to skip TestCases starting with ``_`` --- nibabel/testing_pytest/__init__.py | 14 ++++++++++++++ nibabel/tests/test_wrapstruct.py | 30 ++---------------------------- 2 files changed, 16 insertions(+), 28 deletions(-) diff --git a/nibabel/testing_pytest/__init__.py b/nibabel/testing_pytest/__init__.py index 38143d9c38..71217612ed 100644 --- a/nibabel/testing_pytest/__init__.py +++ b/nibabel/testing_pytest/__init__.py @@ -15,6 +15,8 @@ from pkg_resources import resource_filename from os.path import dirname, abspath, join as pjoin +import unittest + import numpy as np from numpy.testing import assert_array_equal, assert_warns from numpy.testing import dec @@ -220,3 +222,15 @@ def assert_arr_dict_equal(dict1, dict2): for key, value1 in dict1.items(): value2 = dict2[key] assert_array_equal(value1, value2) + + +class BaseTestCase(unittest.TestCase): + """ TestCase that does not attempt to run if prefixed with a ``_`` + + This restores the nose-like behavior of skipping so-named test cases + in test runners like pytest. + """ + def setUp(self): + if self.__class__.__name__.startswith('_'): + raise unittest.SkipTest("Base test case - subclass to run") + super().setUp() diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index f627109fd0..f052098475 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -34,7 +34,7 @@ from ..spatialimages import HeaderDataError from .. 
import imageglobals -from unittest import TestCase, SkipTest +from ..testing_pytest import BaseTestCase from numpy.testing import assert_array_equal import pytest @@ -106,7 +106,7 @@ def log_chk(hdr, level): return hdrc, message, raiser -class _TestWrapStructBase(TestCase): +class _TestWrapStructBase(BaseTestCase): ''' Class implements base tests for binary headers It serves as a base class for other binary header tests @@ -119,8 +119,6 @@ def get_bad_bb(self): return None def test_general_init(self): - if not self.header_class: - pytest.skip() hdr = self.header_class() # binaryblock has length given by header data dtype binblock = hdr.binaryblock @@ -140,8 +138,6 @@ def _set_something_into_hdr(self, hdr): def test__eq__(self): # Test equal and not equal - if not self.header_class: - pytest.skip() hdr1 = self.header_class() hdr2 = self.header_class() assert hdr1 == hdr2 @@ -158,8 +154,6 @@ def test__eq__(self): def test_to_from_fileobj(self): # Successful write using write_to - if not self.header_class: - pytest.skip() hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) @@ -169,8 +163,6 @@ def test_to_from_fileobj(self): assert hdr2.binaryblock == hdr.binaryblock def test_mappingness(self): - if not self.header_class: - pytest.skip() hdr = self.header_class() with pytest.raises(ValueError): hdr.__setitem__('nonexistent key', 0.1) @@ -207,16 +199,12 @@ def test_endianness_ro(self): endianness on initialization (or occasionally byteswapping the data) - but this is done via via the as_byteswapped method ''' - if not self.header_class: - pytest.skip() hdr = self.header_class() with pytest.raises(AttributeError): hdr.__setattr__('endianness', '<') def test_endian_guess(self): # Check guesses of endian - if not self.header_class: - pytest.skip() eh = self.header_class() assert eh.endianness == native_code hdr_data = eh.structarr.copy() @@ -231,8 +219,6 @@ def test_binblock_is_file(self): # strings following. 
More generally, there may be other perhaps # optional data after the binary block, in which case you will need to # override this test - if not self.header_class: - pytest.skip() hdr = self.header_class() str_io = BytesIO() hdr.write_to(str_io) @@ -240,8 +226,6 @@ def test_binblock_is_file(self): def test_structarr(self): # structarr attribute also read only - if not self.header_class: - pytest.skip() hdr = self.header_class() # Just check we can get structarr hdr.structarr @@ -260,8 +244,6 @@ def assert_no_log_err(self, hdr): def test_bytes(self): # Test get of bytes - if not self.header_class: - pytest.skip() hdr1 = self.header_class() bb = hdr1.binaryblock hdr2 = self.header_class(hdr1.binaryblock) @@ -292,8 +274,6 @@ def test_bytes(self): def test_as_byteswapped(self): # Check byte swapping - if not self.header_class: - pytest.skip() hdr = self.header_class() assert hdr.endianness == native_code # same code just returns a copy @@ -318,8 +298,6 @@ def check_fix(self, *args, **kwargs): def test_empty_check(self): # Empty header should be error free - if not self.header_class: - pytest.skip() hdr = self.header_class() hdr.check_fix(error_level=0) @@ -329,8 +307,6 @@ def _dxer(self, hdr): return self.header_class.diagnose_binaryblock(binblock) def test_str(self): - if not self.header_class: - pytest.skip() hdr = self.header_class() # Check something returns from str s1 = str(hdr) @@ -344,8 +320,6 @@ class _TestLabeledWrapStruct(_TestWrapStructBase): def test_get_value_label(self): # Test get value label method # Make a new class to avoid overwriting recoders of original - if not self.header_class: - pytest.skip() class MyHdr(self.header_class): _field_recoders = {} hdr = MyHdr() From 9d1795a861b5108974ab09bb4ee2eadb0c94cc17 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 09:56:30 -0500 Subject: [PATCH 469/689] TMP: Skip failing streamlines modules --- nibabel/streamlines/tests/test_array_sequence.py | 2 ++ nibabel/streamlines/tests/test_tck.py | 2 ++ nibabel/streamlines/tests/test_tractogram.py | 1 + nibabel/streamlines/tests/test_trk.py | 1 + 4 files changed, 6 insertions(+) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index c92580accb..26b824e596 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -9,6 +9,8 @@ from nibabel.testing import assert_arrays_equal from numpy.testing import assert_array_equal +import pytest; pytestmark = pytest.mark.skip() + from ..array_sequence import ArraySequence, is_array_sequence, concatenate diff --git a/nibabel/streamlines/tests/test_tck.py b/nibabel/streamlines/tests/test_tck.py index ad16b52754..573bed02d3 100644 --- a/nibabel/streamlines/tests/test_tck.py +++ b/nibabel/streamlines/tests/test_tck.py @@ -14,6 +14,8 @@ from .. 
import tck as tck_module from ..tck import TckFile +import pytest; pytestmark = pytest.mark.skip() + from nose.tools import assert_equal, assert_raises, assert_true from numpy.testing import assert_array_equal from nibabel.testing import data_path, clear_and_catch_warnings diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 407f3ef413..ba84735450 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -10,6 +10,7 @@ from nibabel.testing import clear_and_catch_warnings from nose.tools import assert_equal, assert_raises, assert_true from numpy.testing import assert_array_equal, assert_array_almost_equal +import pytest; pytestmark = pytest.mark.skip() from .. import tractogram as module_tractogram from ..tractogram import is_data_dict, is_lazy_dict diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index a0a3d8a1f3..8d4f01d766 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -11,6 +11,7 @@ from nibabel.testing import clear_and_catch_warnings, assert_arr_dict_equal from nose.tools import assert_equal, assert_raises, assert_true from numpy.testing import assert_array_equal +import pytest; pytestmark = pytest.mark.skip() from .test_tractogram import assert_tractogram_equal from ..tractogram import Tractogram From f2ff7feea8f810ce76b42aef87c268a52721316f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:00:44 -0500 Subject: [PATCH 470/689] CI: Skip converted tests in nose --- .travis.yml | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cdcb578406..a0c19ff1f3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -133,7 +133,61 @@ script: mkdir for_testing cd for_testing cp ../.coveragerc . 
-      nosetests --with-doctest --with-coverage --cover-package nibabel nibabel
+      nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \
+        -I test_api_validators \
+        -I test_arrayproxy \
+        -I test_arraywriters \
+        -I test_batteryrunners \
+        -I test_brikhead \
+        -I test_casting \
+        -I test_data \
+        -I test_deprecated \
+        -I test_deprecator \
+        -I test_dft \
+        -I test_ecat \
+        -I test_ecat_data \
+        -I test_endiancodes \
+        -I test_environment \
+        -I test_euler \
+        -I test_filebasedimages \
+        -I test_filehandles \
+        -I test_fileholders \
+        -I test_filename_parser \
+        -I test_files_interface \
+        -I test_fileslice \
+        -I test_fileutils \
+        -I test_floating \
+        -I test_funcs \
+        -I test_h5py_compat \
+        -I test_image_api \
+        -I test_image_load_save \
+        -I test_image_types \
+        -I test_imageclasses \
+        -I test_imageglobals \
+        -I test_keywordonly \
+        -I test_loadsave \
+        -I test_minc1 \
+        -I test_minc2 \
+        -I test_minc2_data \
+        -I test_mriutils \
+        -I test_nibabel_data \
+        -I test_nifti1 \
+        -I test_nifti2 \
+        -I test_openers \
+        -I test_optpkg \
+        -I test_orientations \
+        -I test_parrec \
+        -I test_parrec_data \
+        -I test_pkg_info \
+        -I test_processing \
+        -I test_proxy_api \
+        -I test_quaternions \
+        -I test_recoder \
+        -I test_removalschedule \
+        -I test_round_trip \
+        -I test_rstutils \
+        -I test_scaling \
+        -I test_wrapstruct
   elif [ "${CHECK_TYPE}" == "test" ]; then
     # Change into an innocuous directory and find tests from installation
     mkdir for_testing
     cd for_testing
     cp ../.coveragerc .
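The split between the two runners works in both directions: the `-I` patterns above keep nose away from modules that have already been converted, while modules still awaiting conversion carry the module-level skip marker added in the earlier patches (457 onward), so the pytest job ignores them instead. A minimal sketch of that marker, with illustrative module contents (not a file from the nibabel tree):

    import pytest

    # pytest collects this module but skips every test in it until the
    # nose-to-pytest conversion of the file is finished; nose does not
    # understand pytest marks and keeps running the module as before.
    pytestmark = pytest.mark.skip()

    def test_not_yet_converted():
        assert 1 + 1 == 2  # never executed while the mark is in place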
From 01b1b7e0348711f01b4c43e539d94d80f71f71a0 Mon Sep 17 00:00:00 2001
From: robbisg
Date: Tue, 4 Feb 2020 13:06:54 +0200
Subject: [PATCH 471/689] TEST: pytest conversion #864 #865

---
 .../streamlines/tests/test_array_sequence.py  | 150 +++++++++++-------
 1 file changed, 93 insertions(+), 57 deletions(-)

diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py
index 26b824e596..4fdab33d86 100644
--- a/nibabel/streamlines/tests/test_array_sequence.py
+++ b/nibabel/streamlines/tests/test_array_sequence.py
@@ -5,8 +5,8 @@
 import itertools
 
 import numpy as np
-from nose.tools import assert_equal, assert_raises, assert_true
-from nibabel.testing import assert_arrays_equal
+import pytest
+from nibabel.testing_pytest import assert_arrays_equal
 from numpy.testing import assert_array_equal
 
 import pytest; pytestmark = pytest.mark.skip()
 
 from ..array_sequence import ArraySequence, is_array_sequence, concatenate
 
 
 SEQ_DATA = {}
 
 
-def setup():
+def setup_module():
     global SEQ_DATA
     rng = np.random.RandomState(42)
     SEQ_DATA['rng'] = rng
     SEQ_DATA['data'] = generate_data(nb_arrays=5, common_shape=(3,), rng=rng)
     SEQ_DATA['seq'] = ArraySequence(SEQ_DATA['data'])
 
 
 def generate_data(nb_arrays, common_shape, rng):
     data = [rng.rand(*(rng.randint(3, 20),) + common_shape) * 100
             for _ in range(nb_arrays)]
     return data
 
 
 def check_empty_arr_seq(seq):
-    assert_equal(len(seq), 0)
-    assert_equal(len(seq._offsets), 0)
-    assert_equal(len(seq._lengths), 0)
+    assert len(seq) == 0
+    assert len(seq._offsets) == 0
+    assert len(seq._lengths) == 0
 
     # assert_equal(seq._data.ndim, 0)
-    assert_equal(seq._data.ndim, 1)
-    assert_true(seq.common_shape == ())
+    assert seq._data.ndim == 1
+    assert seq.common_shape == ()
 
 
 def check_arr_seq(seq, arrays):
     lengths = list(map(len, arrays))
-    assert_true(is_array_sequence(seq))
-    assert_equal(len(seq), len(arrays))
-    assert_equal(len(seq._offsets), len(arrays))
-    assert_equal(len(seq._lengths), len(arrays))
-    assert_equal(seq._data.shape[1:], arrays[0].shape[1:])
-    assert_equal(seq.common_shape, arrays[0].shape[1:])
+    assert is_array_sequence(seq)
+    assert len(seq) == len(arrays)
+    assert len(seq._offsets) == len(arrays)
+    assert len(seq._lengths) == len(arrays)
+    assert seq._data.shape[1:] == arrays[0].shape[1:]
+    assert seq.common_shape == arrays[0].shape[1:]
+
     assert_arrays_equal(seq, arrays)
 
     # If seq is a view, then order of internal data is not guaranteed.
@@ -56,18 +59,20 @@ def check_arr_seq(seq, arrays):
         assert_array_equal(sorted(seq._lengths), sorted(lengths))
     else:
         seq.shrink_data()
-        assert_equal(seq._data.shape[0], sum(lengths))
+
+    assert seq._data.shape[0] == sum(lengths)
+
     assert_array_equal(seq._data, np.concatenate(arrays, axis=0))
     assert_array_equal(seq._offsets, np.r_[0, np.cumsum(lengths)[:-1]])
     assert_array_equal(seq._lengths, lengths)
 
 
 def check_arr_seq_view(seq_view, seq):
-    assert_true(seq_view._is_view)
-    assert_true(seq_view is not seq)
-    assert_true(np.may_share_memory(seq_view._data, seq._data))
-    assert_true(seq_view._offsets is not seq._offsets)
-    assert_true(seq_view._lengths is not seq._lengths)
+    assert seq_view._is_view
+    assert seq_view is not seq
+    assert np.may_share_memory(seq_view._data, seq._data)
+    assert seq_view._offsets is not seq._offsets
+    assert seq_view._lengths is not seq._lengths
 
 
 class TestArraySequence(unittest.TestCase):
@@ -99,8 +104,8 @@ def test_creating_arraysequence_from_generator(self):
         seq_with_buffer = ArraySequence(gen_2, buffer_size=256)
 
         # Check buffer size effect
-        assert_equal(seq_with_buffer.data.shape, seq.data.shape)
-        assert_true(seq_with_buffer._buffer_size > seq._buffer_size)
+        assert seq_with_buffer.data.shape == seq.data.shape
+        assert seq_with_buffer._buffer_size > seq._buffer_size
 
         # Check generator result
         check_arr_seq(seq, SEQ_DATA['data'])
@@ -123,26 +128,27 @@ def test_arraysequence_iter(self):
         # Try iterating through a corrupted ArraySequence object.
         seq = SEQ_DATA['seq'].copy()
         seq._lengths = seq._lengths[::2]
-        assert_raises(ValueError, list, seq)
+        with pytest.raises(ValueError):
+            list(seq)
 
     def test_arraysequence_copy(self):
         orig = SEQ_DATA['seq']
         seq = orig.copy()
         n_rows = seq.total_nb_rows
-        assert_equal(n_rows, orig.total_nb_rows)
+        assert n_rows == orig.total_nb_rows
         assert_array_equal(seq._data, orig._data[:n_rows])
-        assert_true(seq._data is not orig._data)
+        assert seq._data is not orig._data
         assert_array_equal(seq._offsets, orig._offsets)
-        assert_true(seq._offsets is not orig._offsets)
+        assert seq._offsets is not orig._offsets
         assert_array_equal(seq._lengths, orig._lengths)
-        assert_true(seq._lengths is not orig._lengths)
-        assert_equal(seq.common_shape, orig.common_shape)
+        assert seq._lengths is not orig._lengths
+        assert seq.common_shape == orig.common_shape
 
         # Taking a copy of an `ArraySequence` generated by slicing.
         # Only keep needed data.
         seq = orig[::2].copy()
         check_arr_seq(seq, SEQ_DATA['data'][::2])
-        assert_true(seq._data is not orig._data)
+        assert seq._data is not orig._data
 
     def test_arraysequence_append(self):
         element = generate_data(nb_arrays=1,
@@ -173,7 +179,9 @@ def test_arraysequence_append(self):
         element = generate_data(nb_arrays=1,
                                 common_shape=SEQ_DATA['seq'].common_shape*2,
                                 rng=SEQ_DATA['rng'])[0]
-        assert_raises(ValueError, seq.append, element)
+
+        with pytest.raises(ValueError):
+            seq.append(element)
 
     def test_arraysequence_extend(self):
         new_data = generate_data(nb_arrays=10,
@@ -219,7 +227,8 @@ def test_arraysequence_extend(self):
                                  common_shape=SEQ_DATA['seq'].common_shape*2,
                                  rng=SEQ_DATA['rng'])
         seq = SEQ_DATA['seq'].copy()  # Copy because of in-place modification.
-        assert_raises(ValueError, seq.extend, data)
+        with pytest.raises(ValueError):
+            seq.extend(data)
 
         # Extend after extracting some slice
         working_slice = seq[:2]
@@ -264,7 +273,9 @@ def test_arraysequence_getitem(self):
                                  for i, keep in enumerate(selection) if keep])
 
         # Test invalid indexing
-        assert_raises(TypeError, SEQ_DATA['seq'].__getitem__, 'abc')
+        with pytest.raises(TypeError):
+            SEQ_DATA['seq'].__getitem__('abc')
 
         # Get specific columns.
         seq_view = SEQ_DATA['seq'][:, 2]
@@ -287,7 +298,7 @@ def test_arraysequence_setitem(self):
         # Setitem with a scalar.
         seq = SEQ_DATA['seq'].copy()
         seq[:] = 0
-        assert_true(seq._data.sum() == 0)
+        assert seq._data.sum() == 0
 
         # Setitem with a list of ndarray.
         seq = SEQ_DATA['seq'] * 0
@@ -297,12 +308,12 @@ def test_arraysequence_setitem(self):
         # Setitem using tuple indexing.
         seq = ArraySequence(np.arange(900).reshape((50,6,3)))
         seq[:, 0] = 0
-        assert_true(seq._data[:, 0].sum() == 0)
+        assert seq._data[:, 0].sum() == 0
 
         # Setitem using tuple indexing.
         seq = ArraySequence(np.arange(900).reshape((50,6,3)))
         seq[range(len(seq))] = 0
-        assert_true(seq._data.sum() == 0)
+        assert seq._data.sum() == 0
 
         # Setitem of a slice using another slice.
         seq = ArraySequence(np.arange(900).reshape((50,6,3)))
@@ -311,20 +322,26 @@ def test_arraysequence_setitem(self):
 
         # Setitem between array sequences with different number of sequences.
         seq = ArraySequence(np.arange(900).reshape((50,6,3)))
-        assert_raises(ValueError, seq.__setitem__, slice(0, 4), seq[5:10])
+        with pytest.raises(ValueError):
+            seq.__setitem__(slice(0, 4), seq[5:10])
 
         # Setitem between array sequences with different amount of points.
         seq1 = ArraySequence(np.arange(10).reshape(5, 2))
         seq2 = ArraySequence(np.arange(15).reshape(5, 3))
-        assert_raises(ValueError, seq1.__setitem__, slice(0, 5), seq2)
+        with pytest.raises(ValueError):
+            seq1.__setitem__(slice(0, 5), seq2)
 
         # Setitem between array sequences with different common shape.
         seq1 = ArraySequence(np.arange(12).reshape(2, 2, 3))
         seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2))
-        assert_raises(ValueError, seq1.__setitem__, slice(0, 2), seq2)
+        with pytest.raises(ValueError):
+            seq1.__setitem__(slice(0, 2), seq2)
 
         # Invalid index.
-        assert_raises(TypeError, seq.__setitem__, object(), None)
+        with pytest.raises(TypeError):
+            seq.__setitem__(object(), None)
 
     def test_arraysequence_operators(self):
         # Disable division per zero warnings.
@@ -343,36 +360,45 @@ def test_arraysequence_operators(self):
         def _test_unary(op, arrseq):
             orig = arrseq.copy()
             seq = getattr(orig, op)()
-            assert_true(seq is not orig)
+            assert seq is not orig
             check_arr_seq(seq, [getattr(d, op)() for d in orig])
 
         def _test_binary(op, arrseq, scalars, seqs, inplace=False):
            for scalar in scalars:
                 orig = arrseq.copy()
                 seq = getattr(orig, op)(scalar)
-                assert_true((seq is orig) if inplace else (seq is not orig))
+
+                if inplace:
+                    assert seq is orig
+                else:
+                    assert seq is not orig
+
                 check_arr_seq(seq, [getattr(e, op)(scalar) for e in arrseq])
 
             # Test math operators with another ArraySequence.
             for other in seqs:
                 orig = arrseq.copy()
                 seq = getattr(orig, op)(other)
-                assert_true(seq is not SEQ_DATA['seq'])
+                assert seq is not SEQ_DATA['seq']
                 check_arr_seq(seq, [getattr(e1, op)(e2)
                                     for e1, e2 in zip(arrseq, other)])
 
             # Operations between array sequences of different lengths.
             orig = arrseq.copy()
-            assert_raises(ValueError, getattr(orig, op), orig[::2])
+            with pytest.raises(ValueError):
+                getattr(orig, op)(orig[::2])
 
             # Operations between array sequences with different amount of data.
             seq1 = ArraySequence(np.arange(10).reshape(5, 2))
             seq2 = ArraySequence(np.arange(15).reshape(5, 3))
-            assert_raises(ValueError, getattr(seq1, op), seq2)
+            with pytest.raises(ValueError):
+                getattr(seq1, op)(seq2)
 
             # Operations between array sequences with different common shape.
             seq1 = ArraySequence(np.arange(12).reshape(2, 2, 3))
             seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2))
-            assert_raises(ValueError, getattr(seq1, op), seq2)
+            with pytest.raises(ValueError):
+                getattr(seq1, op)(seq2)
 
         for op in ["__add__", "__sub__", "__mul__", "__mod__",
@@ -394,24 +420,33 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False):
                 continue  # Going to deal with it separately.
             _test_binary(op, seq_int, [42, -3, True, 0], [seq_int, seq_bool, -seq_int], inplace=True)  # int <-- int
-            assert_raises(TypeError, _test_binary, op, seq_int, [0.5], [], inplace=True)  # int <-- float
-            assert_raises(TypeError, _test_binary, op, seq_int, [], [seq], inplace=True)  # int <-- float
+
+            with pytest.raises(TypeError):
+                _test_binary(op, seq_int, [0.5], [], inplace=True)  # int <-- float
+            with pytest.raises(TypeError):
+                _test_binary(op, seq_int, [], [seq], inplace=True)  # int <-- float
 
         # __pow__ : Integers to negative integer powers are not allowed.
         _test_binary("__pow__", seq, [42, -3, True, 0], [seq_int, seq_bool, -seq_int])
         _test_binary("__ipow__", seq, [42, -3, True, 0], [seq_int, seq_bool, -seq_int], inplace=True)
-        assert_raises(ValueError, _test_binary, "__pow__", seq_int, [-3], [])
-        assert_raises(ValueError, _test_binary, "__ipow__", seq_int, [-3], [], inplace=True)
+
+        with pytest.raises(ValueError):
+            _test_binary("__pow__", seq_int, [-3], [])
+        with pytest.raises(ValueError):
+            _test_binary("__ipow__", seq_int, [-3], [], inplace=True)
 
         # __itruediv__ is only valid with float arrseq.
         for scalar in SCALARS + ARRSEQS:
-            assert_raises(TypeError, getattr(seq_int.copy(), "__itruediv__"), scalar)
+            seq_int_cp = seq_int.copy()
+            with pytest.raises(TypeError):
+                seq_int_cp.__itruediv__(scalar)
 
         # Bitwise operators
         for op in ("__lshift__", "__rshift__", "__or__", "__and__", "__xor__"):
             _test_binary(op, seq_bool, [42, -3, True, 0], [seq_int, seq_bool, -seq_int])
-            assert_raises(TypeError, _test_binary, op, seq_bool, [0.5], [])
-            assert_raises(TypeError, _test_binary, op, seq, [], [seq])
+
+            with pytest.raises(TypeError):
+                _test_binary(op, seq_bool, [0.5], [])
+            with pytest.raises(TypeError):
+                _test_binary(op, seq, [], [seq])
 
         # Unary operators
         for op in ["__neg__", "__abs__"]:
@@ -422,7 +457,8 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False):
         _test_unary("__abs__", seq_bool)
         _test_unary("__invert__", seq_bool)
-        assert_raises(TypeError, _test_unary, "__invert__", seq)
+        with pytest.raises(TypeError):
+            _test_unary("__invert__", seq)
 
         # Restore flags.
         np.seterr(**flags)
@@ -442,7 +478,7 @@ def test_arraysequence_repr(self):
         txt1 = repr(seq)
         np.set_printoptions(threshold=nb_arrays//2)
         txt2 = repr(seq)
-        assert_true(len(txt2) < len(txt1))
+        assert len(txt2) < len(txt1)
         np.set_printoptions(threshold=bkp_threshold)
 
     def test_save_and_load_arraysequence(self):
@@ -485,10 +521,10 @@ def test_concatenate():
         new_seq = concatenate(seqs, axis=1)
         seq._data += 100  # Modifying the 'seq' shouldn't change 'new_seq'.
         check_arr_seq(new_seq, SEQ_DATA['data'])
-    assert_true(not new_seq._is_view)
+    assert not new_seq._is_view
 
     seq = SEQ_DATA['seq']
     seqs = [seq[:, [i]] for i in range(seq.common_shape[0])]
     new_seq = concatenate(seqs, axis=0)
-    assert_true(len(new_seq), seq.common_shape[0] * len(seq))
+    assert len(new_seq) == seq.common_shape[0] * len(seq)
     assert_array_equal(new_seq._data, seq._data.T.reshape((-1, 1)))
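A recurring hazard in the `assert_raises` conversions above: a `with pytest.raises(...)` block can only verify its first statement, because execution leaves the block as soon as the expected exception is raised, silently skipping anything stacked after it. The hunks above therefore give each expected failure its own block. A self-contained sketch of the difference (`boom` is an illustrative helper, not nibabel code):

    import pytest

    def boom(x):
        raise ValueError(x)

    # Wrong: boom(2) never runs, so this test would keep passing even if
    # boom(2) stopped raising.
    # with pytest.raises(ValueError):
    #     boom(1)
    #     boom(2)

    # Right: one expected exception per block.
    with pytest.raises(ValueError):
        boom(1)
    with pytest.raises(ValueError):
        boom(2)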
check_arr_seq(new_seq, SEQ_DATA['data']) - assert_true(not new_seq._is_view) + assert new_seq._is_view is not True seq = SEQ_DATA['seq'] seqs = [seq[:, [i]] for i in range(seq.common_shape[0])] new_seq = concatenate(seqs, axis=0) - assert_true(len(new_seq), seq.common_shape[0] * len(seq)) + assert len(new_seq) == seq.common_shape[0] * len(seq) assert_array_equal(new_seq._data, seq._data.T.reshape((-1, 1))) From ade7bdee208308bc8202ecffda0c9876b619af18 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 14:02:06 +0200 Subject: [PATCH 472/689] TEST: test_tractogram to pytest #865 #864 --- nibabel/streamlines/tests/test_tractogram.py | 278 ++++++++++--------- 1 file changed, 149 insertions(+), 129 deletions(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index ba84735450..bea0650db0 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -6,9 +6,9 @@ import operator from collections import defaultdict -from nibabel.testing import assert_arrays_equal -from nibabel.testing import clear_and_catch_warnings -from nose.tools import assert_equal, assert_raises, assert_true +import pytest +from nibabel.testing_pytest import assert_arrays_equal +from nibabel.testing_pytest import clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal import pytest; pytestmark = pytest.mark.skip() @@ -94,7 +94,7 @@ def make_dummy_streamline(nb_points): return streamline, data_per_point, data_for_streamline -def setup(): +def setup_module(): global DATA DATA['rng'] = np.random.RandomState(1234) @@ -150,13 +150,12 @@ def check_tractogram_item(tractogram_item, assert_array_equal(tractogram_item.streamline, streamline) - assert_equal(len(tractogram_item.data_for_streamline), - len(data_for_streamline)) + assert len(tractogram_item.data_for_streamline) == len(data_for_streamline) for key in data_for_streamline.keys(): assert_array_equal(tractogram_item.data_for_streamline[key], data_for_streamline[key]) - assert_equal(len(tractogram_item.data_for_points), len(data_for_points)) + assert len(tractogram_item.data_for_points) == len(data_for_points) for key in data_for_points.keys(): assert_arrays_equal(tractogram_item.data_for_points[key], data_for_points[key]) @@ -172,16 +171,16 @@ def check_tractogram(tractogram, data_per_streamline={}, data_per_point={}): streamlines = list(streamlines) - assert_equal(len(tractogram), len(streamlines)) + assert len(tractogram) == len(streamlines) assert_arrays_equal(tractogram.streamlines, streamlines) [t for t in tractogram] # Force iteration through tractogram. 
-    assert_equal(len(tractogram.data_per_streamline), len(data_per_streamline))
+    assert len(tractogram.data_per_streamline) == len(data_per_streamline)
     for key in data_per_streamline.keys():
         assert_arrays_equal(tractogram.data_per_streamline[key],
                             data_per_streamline[key])
 
-    assert_equal(len(tractogram.data_per_point), len(data_per_point))
+    assert len(tractogram.data_per_point) == len(data_per_point)
     for key in data_per_point.keys():
         assert_arrays_equal(tractogram.data_per_point[key],
                             data_per_point[key])
@@ -205,43 +204,44 @@ def test_per_array_dict_creation(self):
         nb_streamlines = len(DATA['tractogram'])
         data_per_streamline = DATA['tractogram'].data_per_streamline
         data_dict = PerArrayDict(nb_streamlines, data_per_streamline)
-        assert_equal(data_dict.keys(), data_per_streamline.keys())
+        assert data_dict.keys() == data_per_streamline.keys()
         for k in data_dict.keys():
             assert_array_equal(data_dict[k], data_per_streamline[k])
 
         del data_dict['mean_curvature']
-        assert_equal(len(data_dict),
-                     len(data_per_streamline)-1)
+        assert len(data_dict) == len(data_per_streamline)-1
 
         # Create a PerArrayDict object using an existing dict object.
         data_per_streamline = DATA['data_per_streamline']
         data_dict = PerArrayDict(nb_streamlines, data_per_streamline)
-        assert_equal(data_dict.keys(), data_per_streamline.keys())
+        assert data_dict.keys() == data_per_streamline.keys()
         for k in data_dict.keys():
             assert_array_equal(data_dict[k], data_per_streamline[k])
 
         del data_dict['mean_curvature']
-        assert_equal(len(data_dict), len(data_per_streamline)-1)
+        assert len(data_dict) == len(data_per_streamline)-1
 
         # Create a PerArrayDict object using keyword arguments.
         data_per_streamline = DATA['data_per_streamline']
         data_dict = PerArrayDict(nb_streamlines, **data_per_streamline)
-        assert_equal(data_dict.keys(), data_per_streamline.keys())
+        assert data_dict.keys() == data_per_streamline.keys()
         for k in data_dict.keys():
             assert_array_equal(data_dict[k], data_per_streamline[k])
 
         del data_dict['mean_curvature']
-        assert_equal(len(data_dict), len(data_per_streamline)-1)
+        assert len(data_dict) == len(data_per_streamline)-1
 
     def test_getitem(self):
         sdict = PerArrayDict(len(DATA['tractogram']),
                              DATA['data_per_streamline'])
-        assert_raises(KeyError, sdict.__getitem__, 'invalid')
+        with pytest.raises(KeyError):
+            sdict['invalid']
+        #assert_raises(KeyError, sdict.__getitem__, 'invalid')
 
         # Test slicing and advanced indexing.
         for k, v in DATA['tractogram'].data_per_streamline.items():
-            assert_true(k in sdict)
+            assert k in sdict
             assert_arrays_equal(sdict[k], v)
             assert_arrays_equal(sdict[::2][k], v[::2])
             assert_arrays_equal(sdict[::-1][k], v[::-1])
@@ -259,7 +259,7 @@ def test_extend(self):
                                  new_data)
         sdict.extend(sdict2)
-        assert_equal(len(sdict), len(sdict2))
+        assert len(sdict) == len(sdict2)
         for k in DATA['tractogram'].data_per_streamline:
             assert_arrays_equal(sdict[k][:len(DATA['tractogram'])],
                                 DATA['tractogram'].data_per_streamline[k])
@@ -279,21 +279,24 @@ def test_extend(self):
                     'mean_colors': 4 * np.array(DATA['mean_colors']),
                     'other': 5 * np.array(DATA['mean_colors'])}
         sdict2 = PerArrayDict(len(DATA['tractogram']), new_data)
-        assert_raises(ValueError, sdict.extend, sdict2)
+        with pytest.raises(ValueError):
+            sdict.extend(sdict2)
 
         # Other dict does not have the same entries (keys mismatched).
         new_data = {'mean_curvature': 2 * np.array(DATA['mean_curvature']),
                     'mean_torsion': 3 * np.array(DATA['mean_torsion']),
                     'other': 4 * np.array(DATA['mean_colors'])}
         sdict2 = PerArrayDict(len(DATA['tractogram']), new_data)
-        assert_raises(ValueError, sdict.extend, sdict2)
+        with pytest.raises(ValueError):
+            sdict.extend(sdict2)
 
         # Other dict has the right number of entries but wrong shape.
         new_data = {'mean_curvature': 2 * np.array(DATA['mean_curvature']),
                     'mean_torsion': 3 * np.array(DATA['mean_torsion']),
                     'mean_colors': 4 * np.array(DATA['mean_torsion'])}
         sdict2 = PerArrayDict(len(DATA['tractogram']), new_data)
-        assert_raises(ValueError, sdict.extend, sdict2)
+        with pytest.raises(ValueError):
+            sdict.extend(sdict2)
 
 
 class TestPerArraySequenceDict(unittest.TestCase):
@@ -304,43 +307,44 @@ def test_per_array_sequence_dict_creation(self):
         total_nb_rows = DATA['tractogram'].streamlines.total_nb_rows
         data_per_point = DATA['tractogram'].data_per_point
         data_dict = PerArraySequenceDict(total_nb_rows, data_per_point)
-        assert_equal(data_dict.keys(), data_per_point.keys())
+        assert data_dict.keys() == data_per_point.keys()
         for k in data_dict.keys():
             assert_arrays_equal(data_dict[k], data_per_point[k])
 
         del data_dict['fa']
-        assert_equal(len(data_dict),
-                     len(data_per_point)-1)
+        assert len(data_dict) == len(data_per_point)-1
 
         # Create a PerArraySequenceDict object using an existing dict object.
         data_per_point = DATA['data_per_point']
         data_dict = PerArraySequenceDict(total_nb_rows, data_per_point)
-        assert_equal(data_dict.keys(), data_per_point.keys())
+        assert data_dict.keys() == data_per_point.keys()
         for k in data_dict.keys():
             assert_arrays_equal(data_dict[k], data_per_point[k])
 
         del data_dict['fa']
-        assert_equal(len(data_dict), len(data_per_point)-1)
+        assert len(data_dict) == len(data_per_point)-1
 
         # Create a PerArraySequenceDict object using keyword arguments.
         data_per_point = DATA['data_per_point']
         data_dict = PerArraySequenceDict(total_nb_rows, **data_per_point)
-        assert_equal(data_dict.keys(), data_per_point.keys())
+        assert data_dict.keys() == data_per_point.keys()
         for k in data_dict.keys():
             assert_arrays_equal(data_dict[k], data_per_point[k])
 
         del data_dict['fa']
-        assert_equal(len(data_dict), len(data_per_point)-1)
+        assert len(data_dict) == len(data_per_point)-1
 
     def test_getitem(self):
         total_nb_rows = DATA['tractogram'].streamlines.total_nb_rows
         sdict = PerArraySequenceDict(total_nb_rows,
                                      DATA['data_per_point'])
-        assert_raises(KeyError, sdict.__getitem__, 'invalid')
+        with pytest.raises(KeyError):
+            sdict['invalid']
+        #assert_raises(KeyError, sdict.__getitem__, 'invalid')
 
         # Test slicing and advanced indexing.
         for k, v in DATA['tractogram'].data_per_point.items():
-            assert_true(k in sdict)
+            assert k in sdict
             assert_arrays_equal(sdict[k], v)
             assert_arrays_equal(sdict[::2][k], v[::2])
             assert_arrays_equal(sdict[::-1][k], v[::-1])
@@ -361,7 +365,7 @@ def test_extend(self):
         sdict2 = PerArraySequenceDict(np.sum(list_nb_points), new_data)
 
         sdict.extend(sdict2)
-        assert_equal(len(sdict), len(sdict2))
+        assert len(sdict) == len(sdict2)
         for k in DATA['tractogram'].data_per_point:
             assert_arrays_equal(sdict[k][:len(DATA['tractogram'])],
                                 DATA['tractogram'].data_per_point[k])
@@ -383,7 +387,8 @@ def test_extend(self):
                                               data_per_point_shapes,
                                               rng=DATA['rng'])
         sdict2 = PerArraySequenceDict(np.sum(list_nb_points), new_data)
-        assert_raises(ValueError, sdict.extend, sdict2)
+        with pytest.raises(ValueError):
+            sdict.extend(sdict2)
 
         # Other dict does not have the same entries (keys mismatched).
data_per_point_shapes = {"colors": DATA['colors'][0].shape[1:], @@ -392,7 +397,8 @@ def test_extend(self): data_per_point_shapes, rng=DATA['rng']) sdict2 = PerArraySequenceDict(np.sum(list_nb_points), new_data) - assert_raises(ValueError, sdict.extend, sdict2) + with pytest.raises(ValueError): + sdict.extend(sdict2) # Other dict has the right number of entries but wrong shape. data_per_point_shapes = {"colors": DATA['colors'][0].shape[1:], @@ -401,7 +407,8 @@ def test_extend(self): data_per_point_shapes, rng=DATA['rng']) sdict2 = PerArraySequenceDict(np.sum(list_nb_points), new_data) - assert_raises(ValueError, sdict.extend, sdict2) + with pytest.raises(ValueError): + sdict.extend(sdict2) class TestLazyDict(unittest.TestCase): @@ -414,14 +421,13 @@ def test_lazydict_creation(self): expected_keys = DATA['data_per_streamline_func'].keys() for data_dict in lazy_dicts: - assert_true(is_lazy_dict(data_dict)) - assert_equal(data_dict.keys(), expected_keys) + assert is_lazy_dict(data_dict) is True + assert data_dict.keys() == expected_keys for k in data_dict.keys(): assert_array_equal(list(data_dict[k]), list(DATA['data_per_streamline'][k])) - assert_equal(len(data_dict), - len(DATA['data_per_streamline_func'])) + assert len(data_dict) == len(DATA['data_per_streamline_func']) class TestTractogramItem(unittest.TestCase): @@ -440,7 +446,7 @@ def test_creating_tractogram_item(self): # Create a tractogram item with a streamline, data. t = TractogramItem(streamline, data_for_streamline, data_for_points) - assert_equal(len(t), len(streamline)) + assert len(t) == len(streamline) assert_array_equal(t.streamline, streamline) assert_array_equal(list(t), streamline) assert_array_equal(t.data_for_streamline['mean_curvature'], @@ -457,7 +463,7 @@ def test_tractogram_creation(self): # Create an empty tractogram. tractogram = Tractogram() check_tractogram(tractogram) - assert_true(tractogram.affine_to_rasmm is None) + assert tractogram.affine_to_rasmm is None # Create a tractogram with only streamlines tractogram = Tractogram(streamlines=DATA['streamlines']) @@ -478,8 +484,8 @@ def test_tractogram_creation(self): DATA['data_per_streamline'], DATA['data_per_point']) - assert_true(is_data_dict(tractogram.data_per_streamline)) - assert_true(is_data_dict(tractogram.data_per_point)) + assert is_data_dict(tractogram.data_per_streamline) is True + assert is_data_dict(tractogram.data_per_point) is True # Create a tractogram from another tractogram attributes. tractogram2 = Tractogram(tractogram.streamlines, @@ -503,8 +509,9 @@ def test_tractogram_creation(self): [(0, 0, 1)]*5] data_per_point = {'wrong_data': wrong_data} - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_point=data_per_point) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_point=data_per_point) # Inconsistent number of scalars between streamlines wrong_data = [[(1, 0, 0)]*1, @@ -512,8 +519,9 @@ def test_tractogram_creation(self): [(0, 0, 1)]*5] data_per_point = {'wrong_data': wrong_data} - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_point=data_per_point) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_point=data_per_point) def test_setting_affine_to_rasmm(self): tractogram = DATA['tractogram'].copy() @@ -521,19 +529,20 @@ def test_setting_affine_to_rasmm(self): # Test assigning None. 
tractogram.affine_to_rasmm = None - assert_true(tractogram.affine_to_rasmm is None) + assert tractogram.affine_to_rasmm is None # Test assigning a valid ndarray (should make a copy). tractogram.affine_to_rasmm = affine - assert_true(tractogram.affine_to_rasmm is not affine) + assert tractogram.affine_to_rasmm is not affine # Test assigning a list of lists. tractogram.affine_to_rasmm = affine.tolist() assert_array_equal(tractogram.affine_to_rasmm, affine) # Test assigning a ndarray with wrong shape. - assert_raises(ValueError, setattr, tractogram, - "affine_to_rasmm", affine[::2]) + with pytest.raises(ValueError): + tractogram.affine_to_rasmm = affine[::2] + def test_tractogram_getitem(self): # Retrieve TractogramItem by their index. @@ -594,21 +603,21 @@ def test_tractogram_copy(self): tractogram = DATA['tractogram'].copy() # Check we copied the data and not simply created new references. - assert_true(tractogram is not DATA['tractogram']) - assert_true(tractogram.streamlines - is not DATA['tractogram'].streamlines) - assert_true(tractogram.data_per_streamline - is not DATA['tractogram'].data_per_streamline) - assert_true(tractogram.data_per_point - is not DATA['tractogram'].data_per_point) + assert tractogram is not DATA['tractogram'] + assert tractogram.streamlines is \ + not DATA['tractogram'].streamlines + assert tractogram.data_per_streamline is \ + not DATA['tractogram'].data_per_streamline + assert tractogram.data_per_point is \ + not DATA['tractogram'].data_per_point for key in tractogram.data_per_streamline: - assert_true(tractogram.data_per_streamline[key] - is not DATA['tractogram'].data_per_streamline[key]) + assert tractogram.data_per_streamline[key] is \ + not DATA['tractogram'].data_per_streamline[key] for key in tractogram.data_per_point: - assert_true(tractogram.data_per_point[key] - is not DATA['tractogram'].data_per_point[key]) + assert tractogram.data_per_point[key] is \ + not DATA['tractogram'].data_per_point[key] # Check the values of the data are the same. assert_tractogram_equal(tractogram, DATA['tractogram']) @@ -619,39 +628,44 @@ def test_creating_invalid_tractogram(self): [(0, 1, 0)]*2, [(0, 0, 1)]*3] # Last streamlines has 5 points. - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_point={'scalars': scalars}) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_point={'scalars':scalars}) # Not enough data_per_streamline for all streamlines. properties = [np.array([1.11, 1.22], dtype="f4"), np.array([3.11, 3.22], dtype="f4")] - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_streamline={'properties': properties}) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_streamline={'properties': properties}) # Inconsistent dimension for a data_per_point. scalars = [[(1, 0, 0)]*1, [(0, 1)]*2, [(0, 0, 1)]*5] - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_point={'scalars': scalars}) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_point={'scalars':scalars}) # Inconsistent dimension for a data_per_streamline. properties = [[1.11, 1.22], [2.11], [3.11, 3.22]] - assert_raises(ValueError, Tractogram, DATA['streamlines'], - data_per_streamline={'properties': properties}) + with pytest.raises(ValueError): + Tractogram(streamlines=DATA['streamlines'], + data_per_streamline={'properties': properties}) # Too many dimension for a data_per_streamline. 
         properties = [np.array([[1.11], [1.22]], dtype="f4"),
                       np.array([[2.11], [2.22]], dtype="f4"),
                       np.array([[3.11], [3.22]], dtype="f4")]
-        assert_raises(ValueError, Tractogram, DATA['streamlines'],
-                      data_per_streamline={'properties': properties})
+        with pytest.raises(ValueError):
+            Tractogram(streamlines=DATA['streamlines'],
+                       data_per_streamline={'properties': properties})
 
     def test_tractogram_apply_affine(self):
         tractogram = DATA['tractogram'].copy()
@@ -661,7 +675,7 @@ def test_tractogram_apply_affine(self):
 
         # Apply the affine to the streamline in a lazy manner.
         transformed_tractogram = tractogram.apply_affine(affine, lazy=True)
-        assert_true(type(transformed_tractogram) is LazyTractogram)
+        assert type(transformed_tractogram) is LazyTractogram
         check_tractogram(transformed_tractogram,
                          streamlines=[s*scaling for s in DATA['streamlines']],
                          data_per_streamline=DATA['data_per_streamline'],
@@ -674,7 +688,7 @@ def test_tractogram_apply_affine(self):
 
         # Apply the affine to the streamlines in-place.
         transformed_tractogram = tractogram.apply_affine(affine)
-        assert_true(transformed_tractogram is tractogram)
+        assert transformed_tractogram is tractogram
         check_tractogram(tractogram,
                          streamlines=[s*scaling for s in DATA['streamlines']],
                          data_per_streamline=DATA['data_per_streamline'],
@@ -690,7 +704,7 @@ def test_tractogram_apply_affine(self):
         # shouldn't affect the remaining streamlines.
         tractogram = DATA['tractogram'].copy()
         transformed_tractogram = tractogram[::2].apply_affine(affine)
-        assert_true(transformed_tractogram is not tractogram)
+        assert transformed_tractogram is not tractogram
         check_tractogram(tractogram[::2],
                          streamlines=[s*scaling for s in DATA['streamlines'][::2]],
                          data_per_streamline=DATA['tractogram'].data_per_streamline[::2],
@@ -724,7 +738,7 @@ def test_tractogram_apply_affine(self):
         tractogram = DATA['tractogram'].copy()
         tractogram.affine_to_rasmm = None
         tractogram.apply_affine(affine)
-        assert_true(tractogram.affine_to_rasmm is None)
+        assert tractogram.affine_to_rasmm is None
 
     def test_tractogram_to_world(self):
         tractogram = DATA['tractogram'].copy()
@@ -738,7 +752,7 @@ def test_tractogram_to_world(self):
                                   np.linalg.inv(affine))
 
         tractogram_world = transformed_tractogram.to_world(lazy=True)
-        assert_true(type(tractogram_world) is LazyTractogram)
+        assert type(tractogram_world) is LazyTractogram
         assert_array_almost_equal(tractogram_world.affine_to_rasmm,
                                   np.eye(4))
         for s1, s2 in zip(tractogram_world.streamlines, DATA['streamlines']):
@@ -746,14 +760,14 @@ def test_tractogram_to_world(self):
 
         # Bring the streamlines back to world space in an in-place manner.
         tractogram_world = transformed_tractogram.to_world()
-        assert_true(tractogram_world is tractogram)
+        assert tractogram_world is tractogram
         assert_array_almost_equal(tractogram.affine_to_rasmm, np.eye(4))
         for s1, s2 in zip(tractogram.streamlines, DATA['streamlines']):
             assert_array_almost_equal(s1, s2)
 
         # Calling to_world twice should do nothing.
         tractogram_world2 = transformed_tractogram.to_world()
-        assert_true(tractogram_world2 is tractogram)
+        assert tractogram_world2 is tractogram
         assert_array_almost_equal(tractogram.affine_to_rasmm, np.eye(4))
         for s1, s2 in zip(tractogram.streamlines, DATA['streamlines']):
             assert_array_almost_equal(s1, s2)
@@ -761,7 +775,8 @@ def test_tractogram_to_world(self):
         # Calling to_world when affine_to_rasmm is None should fail.
         tractogram = DATA['tractogram'].copy()
         tractogram.affine_to_rasmm = None
-        assert_raises(ValueError, tractogram.to_world)
+        with pytest.raises(ValueError):
+            tractogram.to_world()
 
     def test_tractogram_extend(self):
         # Load tractogram that contains some metadata.
@@ -771,7 +786,7 @@ def test_tractogram_extend(self):
                                 (extender, True)):
             first_arg = t.copy()
             new_t = op(first_arg, t)
-            assert_equal(new_t is first_arg, in_place)
+            assert (new_t is first_arg) is in_place
             assert_tractogram_equal(new_t[:len(t)], DATA['tractogram'])
             assert_tractogram_equal(new_t[len(t):], DATA['tractogram'])
 
@@ -790,7 +805,8 @@ class TestLazyTractogram(unittest.TestCase):
 
     def test_lazy_tractogram_creation(self):
         # To create tractogram from arrays use `Tractogram`.
-        assert_raises(TypeError, LazyTractogram, DATA['streamlines'])
+        with pytest.raises(TypeError):
+            LazyTractogram(streamlines=DATA['streamlines'])
 
         # Streamlines and other data as generators
         streamlines = (x for x in DATA['streamlines'])
         data_per_point = {"colors": (x for x in DATA['colors'])}
         data_per_streamline = {'torsion': (x for x in DATA['mean_torsion']),
                                'colors': (x for x in DATA['mean_colors'])}
 
         # Creating LazyTractogram with generators is not allowed as
         # generators get exhausted and are not reusable unlike generator
         # function.
-        assert_raises(TypeError, LazyTractogram, streamlines)
-        assert_raises(TypeError, LazyTractogram,
-                      data_per_point={"none": None})
-        assert_raises(TypeError, LazyTractogram,
-                      data_per_streamline=data_per_streamline)
-        assert_raises(TypeError, LazyTractogram, DATA['streamlines'],
-                      data_per_point=data_per_point)
+        with pytest.raises(TypeError):
+            LazyTractogram(streamlines=streamlines)
+            LazyTractogram(data_per_point={"none": None})
+            LazyTractogram(data_per_streamline=data_per_streamline)
+            LazyTractogram(streamlines=DATA['streamlines'],
+                           data_per_point=data_per_point)
 
         # Empty `LazyTractogram`
         tractogram = LazyTractogram()
         check_tractogram(tractogram)
-        assert_true(tractogram.affine_to_rasmm is None)
+        assert tractogram.affine_to_rasmm is None
 
         # Create tractogram with streamlines and other data
         tractogram = LazyTractogram(DATA['streamlines_func'],
                                     DATA['data_per_streamline_func'],
                                     DATA['data_per_point_func'])
 
-        assert_true(is_lazy_dict(tractogram.data_per_streamline))
-        assert_true(is_lazy_dict(tractogram.data_per_point))
+        assert is_lazy_dict(tractogram.data_per_streamline) is True
+        assert is_lazy_dict(tractogram.data_per_point) is True
 
         [t for t in tractogram]  # Force iteration through tractogram.
-        assert_equal(len(tractogram), len(DATA['streamlines']))
+        assert len(tractogram) == len(DATA['streamlines'])
 
         # Generator functions get re-called and create new iterators.
         for i in range(2):
@@ -855,18 +870,20 @@ def _data_gen():
         assert_tractogram_equal(tractogram, DATA['tractogram'])
 
         # Creating a LazyTractogram from something that is not a coroutine should raise an error.
-        assert_raises(TypeError, LazyTractogram.from_data_func, _data_gen())
+        with pytest.raises(TypeError):
+            LazyTractogram.from_data_func(_data_gen())
 
     def test_lazy_tractogram_getitem(self):
-        assert_raises(NotImplementedError,
-                      DATA['lazy_tractogram'].__getitem__, 0)
+        with pytest.raises(NotImplementedError):
+            DATA['lazy_tractogram'][0]
 
     def test_lazy_tractogram_extend(self):
         t = DATA['lazy_tractogram'].copy()
         new_t = DATA['lazy_tractogram'].copy()
 
         for op in (operator.add, operator.iadd, extender):
-            assert_raises(NotImplementedError, op, new_t, t)
+            with pytest.raises(NotImplementedError):
+                op(new_t, t)
 
     def test_lazy_tractogram_len(self):
         modules = [module_tractogram]  # Modules for which to catch warnings.
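# Editor's gloss on the warning-count assertions below, assuming the
# documented behaviour of nibabel.testing's clear_and_catch_warnings:
# entering the context with `modules=[module_tractogram]` resets that
# module's __warningregistry__, so a warning already emitted by an earlier
# test is raised -- and recorded in `w` -- again, which is what makes the
# exact `len(w)` counts deterministic.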
@@ -875,35 +892,35 @@ def test_lazy_tractogram_len(self):
 
             # Calling `len` will create new generators each time.
             tractogram = LazyTractogram(DATA['streamlines_func'])
-            assert_true(tractogram._nb_streamlines is None)
+            assert tractogram._nb_streamlines is None
 
             # This should produce a warning message.
-            assert_equal(len(tractogram), len(DATA['streamlines']))
-            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
-            assert_equal(len(w), 1)
+            assert len(tractogram) == len(DATA['streamlines'])
+            assert tractogram._nb_streamlines == len(DATA['streamlines'])
+            assert len(w) == 1
 
             tractogram = LazyTractogram(DATA['streamlines_func'])
 
             # New instances should still produce a warning message.
-            assert_equal(len(tractogram), len(DATA['streamlines']))
-            assert_equal(len(w), 2)
-            assert_true(issubclass(w[-1].category, Warning))
+            assert len(tractogram) == len(DATA['streamlines'])
+            assert len(w) == 2
+            assert issubclass(w[-1].category, Warning) is True
 
             # Calling 'len' again should *not* produce a warning.
-            assert_equal(len(tractogram), len(DATA['streamlines']))
-            assert_equal(len(w), 2)
+            assert len(tractogram) == len(DATA['streamlines'])
+            assert len(w) == 2
 
         with clear_and_catch_warnings(record=True, modules=modules) as w:
             # Once we iterated through the tractogram, we know the length.
             tractogram = LazyTractogram(DATA['streamlines_func'])
-            assert_true(tractogram._nb_streamlines is None)
+            assert tractogram._nb_streamlines is None
             [t for t in tractogram]  # Force iteration through tractogram.
-            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
+            assert tractogram._nb_streamlines == len(DATA['streamlines'])
 
             # This should *not* produce a warning.
-            assert_equal(len(tractogram), len(DATA['streamlines']))
-            assert_equal(len(w), 0)
+            assert len(tractogram) == len(DATA['streamlines'])
+            assert len(w) == 0
 
     def test_lazy_tractogram_apply_affine(self):
         affine = np.eye(4)
@@ -913,7 +930,7 @@ def test_lazy_tractogram_apply_affine(self):
 
         tractogram = DATA['lazy_tractogram'].copy()
         transformed_tractogram = tractogram.apply_affine(affine)
-        assert_true(transformed_tractogram is not tractogram)
+        assert transformed_tractogram is not tractogram
         assert_array_equal(tractogram._affine_to_apply, np.eye(4))
         assert_array_equal(tractogram.affine_to_rasmm, np.eye(4))
         assert_array_equal(transformed_tractogram._affine_to_apply, affine)
@@ -935,14 +952,15 @@ def test_lazy_tractogram_apply_affine(self):
         # Calling to_world when affine_to_rasmm is None should fail.
         tractogram = DATA['lazy_tractogram'].copy()
         tractogram.affine_to_rasmm = None
-        assert_raises(ValueError, tractogram.to_world)
+        with pytest.raises(ValueError):
+            tractogram.to_world()
 
         # But calling apply_affine when affine_to_rasmm is None should work.
         tractogram = DATA['lazy_tractogram'].copy()
         tractogram.affine_to_rasmm = None
         transformed_tractogram = tractogram.apply_affine(affine)
         assert_array_equal(transformed_tractogram._affine_to_apply, affine)
-        assert_true(transformed_tractogram.affine_to_rasmm is None)
+        assert transformed_tractogram.affine_to_rasmm is None
         check_tractogram(transformed_tractogram,
                          streamlines=[s*scaling for s in DATA['streamlines']],
                          data_per_streamline=DATA['data_per_streamline'],
 
         # Calling apply_affine with lazy=False should fail for LazyTractogram.
tractogram = DATA['lazy_tractogram'].copy() - assert_raises(ValueError, tractogram.apply_affine, - affine=np.eye(4), lazy=False) + with pytest.raises(ValueError): + tractogram.apply_affine(affine=np.eye(4), lazy=False) + def test_tractogram_to_world(self): tractogram = DATA['lazy_tractogram'].copy() @@ -965,7 +984,7 @@ def test_tractogram_to_world(self): np.linalg.inv(affine)) tractogram_world = transformed_tractogram.to_world() - assert_true(tractogram_world is not transformed_tractogram) + assert tractogram_world is not transformed_tractogram assert_array_almost_equal(tractogram_world.affine_to_rasmm, np.eye(4)) for s1, s2 in zip(tractogram_world.streamlines, DATA['streamlines']): @@ -980,40 +999,41 @@ def test_tractogram_to_world(self): # Calling to_world when affine_to_rasmm is None should fail. tractogram = DATA['lazy_tractogram'].copy() tractogram.affine_to_rasmm = None - assert_raises(ValueError, tractogram.to_world) + with pytest.raises(ValueError): + tractogram.to_world() def test_lazy_tractogram_copy(self): # Create a copy of the lazy tractogram. tractogram = DATA['lazy_tractogram'].copy() # Check we copied the data and not simply created new references. - assert_true(tractogram is not DATA['lazy_tractogram']) + assert tractogram is not DATA['lazy_tractogram'] # When copying LazyTractogram, the generator function yielding # streamlines should stay the same. - assert_true(tractogram._streamlines - is DATA['lazy_tractogram']._streamlines) + assert tractogram._streamlines \ + is DATA['lazy_tractogram']._streamlines # Copying LazyTractogram, creates new internal LazyDict objects, # but generator functions contained in it should stay the same. - assert_true(tractogram._data_per_streamline - is not DATA['lazy_tractogram']._data_per_streamline) - assert_true(tractogram._data_per_point - is not DATA['lazy_tractogram']._data_per_point) + assert tractogram._data_per_streamline \ + is not DATA['lazy_tractogram']._data_per_streamline + assert tractogram._data_per_point \ + is not DATA['lazy_tractogram']._data_per_point for key in tractogram.data_per_streamline: data = tractogram.data_per_streamline.store[key] expected = DATA['lazy_tractogram'].data_per_streamline.store[key] - assert_true(data is expected) + assert data is expected for key in tractogram.data_per_point: data = tractogram.data_per_point.store[key] expected = DATA['lazy_tractogram'].data_per_point.store[key] - assert_true(data is expected) + assert data is expected # The affine should be a copy. 
- assert_true(tractogram._affine_to_apply - is not DATA['lazy_tractogram']._affine_to_apply) + assert tractogram._affine_to_apply \ + is not DATA['lazy_tractogram']._affine_to_apply assert_array_equal(tractogram._affine_to_apply, DATA['lazy_tractogram']._affine_to_apply) From 083a89f94b594825b92c8351e7fa7fe008273a83 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 16:57:39 +0200 Subject: [PATCH 473/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 4fdab33d86..8c0aba0069 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -44,7 +44,7 @@ def check_empty_arr_seq(seq): def check_arr_seq(seq, arrays): lengths = list(map(len, arrays)) - assert is_array_sequence(seq) == True + assert is_array_sequence(seq) assert len(seq) == len(arrays) assert len(seq._offsets) == len(arrays) assert len(seq._lengths) == len(arrays) From a12438cf34de270a27351dd6242f73b2b7272fd2 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 16:58:04 +0200 Subject: [PATCH 474/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 8c0aba0069..73c8fae8db 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -6,7 +6,7 @@ import numpy as np import pytest -from nibabel.testing_pytest import assert_arrays_equal +from ...testing_pytest import assert_arrays_equal from numpy.testing import assert_array_equal import pytest; pytestmark = pytest.mark.skip() From a44ed467351231fc69398d948251cdbaa8295b8a Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 16:58:19 +0200 Subject: [PATCH 475/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 73c8fae8db..2078f19e6c 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -69,7 +69,7 @@ def check_arr_seq(seq, arrays): def check_arr_seq_view(seq_view, seq): assert seq_view._is_view is True - assert (seq_view is not seq) is True + assert seq_view is not seq assert (np.may_share_memory(seq_view._data, seq._data)) is True assert seq_view._offsets is not seq._offsets assert seq_view._lengths is not seq._lengths From 68d5f9840edc076c5c98b6a0dad3d55fd391e9a6 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:03:03 +0200 Subject: [PATCH 476/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 2078f19e6c..e518201194 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ 
b/nibabel/streamlines/tests/test_array_sequence.py @@ -274,7 +274,7 @@ def test_arraysequence_getitem(self): # Test invalid indexing with pytest.raises(TypeError): - SEQ_DATA['seq'].__getitem__('abc') + SEQ_DATA['seq']['abc'] #SEQ_DATA['seq'].abc # Get specific columns. From 312baa2b8830b1a99bcdd359279aaee7ef51640a Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:03:45 +0200 Subject: [PATCH 477/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index e518201194..fbe923cb98 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -330,7 +330,7 @@ def test_arraysequence_setitem(self): seq1 = ArraySequence(np.arange(10).reshape(5, 2)) seq2 = ArraySequence(np.arange(15).reshape(5, 3)) with pytest.raises(ValueError): - seq1.__setitem__(slice(0, 5), seq2) + seq1[0:5] = seq2 # Setitem between array sequences with different common shape. seq1 = ArraySequence(np.arange(12).reshape(2, 2, 3)) From d2a5878d1937b99633918bca2f25c41801400088 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:04:00 +0200 Subject: [PATCH 478/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index fbe923cb98..3fe11dc521 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -337,7 +337,7 @@ def test_arraysequence_setitem(self): seq2 = ArraySequence(np.arange(8).reshape(2, 2, 2)) with pytest.raises(ValueError): - seq1.__setitem__(slice(0, 2), seq2) + seq1[0:2] = seq2 # Invalid index. 
with pytest.raises(TypeError): From cffd3bf8aa769dd378a1fd8ba352fe2d3b11854f Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:08:11 +0200 Subject: [PATCH 479/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 3fe11dc521..18cee5da80 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -423,6 +423,7 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): with pytest.raises(TypeError): _test_binary(op, seq_int, [0.5], [], inplace=True) # int <-- float + with pytest.raises(TypeError): _test_binary(op, seq_int, [], [seq], inplace=True) # int <-- float From b4630790811142dac1ecd3c0a67041ec82e9f0ec Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:08:25 +0200 Subject: [PATCH 480/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 18cee5da80..a87277f64b 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -433,6 +433,7 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): with pytest.raises(ValueError): _test_binary("__pow__", seq_int, [-3], []) + with pytest.raises(ValueError): _test_binary("__ipow__", seq_int, [-3], [], inplace=True) # __itruediv__ is only valid with float arrseq. From b3fa3fbe71d5e5410fc677f5c309facaf95ca486 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:09:37 +0200 Subject: [PATCH 481/689] Update nibabel/streamlines/tests/test_array_sequence.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_array_sequence.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index a87277f64b..9c1ac74b1e 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -448,6 +448,7 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): with pytest.raises(TypeError): _test_binary(op, seq_bool, [0.5], []) + with pytest.raises(TypeError): _test_binary(op, seq, [], [seq]) # Unary operators From 6e864f594399e3ae7946669a92745c3bf8f31611 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:09:53 +0200 Subject: [PATCH 482/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index bea0650db0..a43c3b0666 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -7,7 +7,7 @@ from collections import defaultdict import pytest -from nibabel.testing_pytest import assert_arrays_equal +from ....testing_pytest import assert_arrays_equal, clear_and_catch_warnings from nibabel.testing_pytest import clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal import pytest; pytestmark = pytest.mark.skip() From 
cb681cddf3a322d07d5146e8f7868f80ce9ce873 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:10:06 +0200 Subject: [PATCH 483/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index a43c3b0666..61b8d8acbb 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -8,7 +8,6 @@ import pytest from ....testing_pytest import assert_arrays_equal, clear_and_catch_warnings -from nibabel.testing_pytest import clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal import pytest; pytestmark = pytest.mark.skip() From 8b01cd45f5d280059f409d3e9db2b4838c66ce01 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:10:25 +0200 Subject: [PATCH 484/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 61b8d8acbb..8141005b50 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -208,7 +208,7 @@ def test_per_array_dict_creation(self): assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] - assert len(data_dict) == len(data_per_streamline)-1 + assert len(data_dict) == len(data_per_streamline) - 1 # Create a PerArrayDict object using an existing dict object. data_per_streamline = DATA['data_per_streamline'] From c1176ecac30293707f1e226ddf9850e342eb01d0 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:10:48 +0200 Subject: [PATCH 485/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 8141005b50..bc1eeb5094 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -218,7 +218,7 @@ def test_per_array_dict_creation(self): assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] - assert len(data_dict) == len(data_per_streamline)-1 + assert len(data_dict) == len(data_per_streamline) - 1 # Create a PerArrayDict object using keyword arguments. 
data_per_streamline = DATA['data_per_streamline'] From d643c7f03777786676593615902a1f6e9e122016 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:11:06 +0200 Subject: [PATCH 486/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index bc1eeb5094..4a9ca698ae 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -228,7 +228,7 @@ def test_per_array_dict_creation(self): assert_array_equal(data_dict[k], data_per_streamline[k]) del data_dict['mean_curvature'] - assert len(data_dict) == len(data_per_streamline)-1 + assert len(data_dict) == len(data_per_streamline) - 1 def test_getitem(self): sdict = PerArrayDict(len(DATA['tractogram']), From 8326522259ac3669eefb470324151328d2963652 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:11:44 +0200 Subject: [PATCH 487/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 4a9ca698ae..4c0f32d0b9 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -311,7 +311,7 @@ def test_per_array_sequence_dict_creation(self): assert_arrays_equal(data_dict[k], data_per_point[k]) del data_dict['fa'] - assert len(data_dict) == len(data_per_point)-1 + assert len(data_dict) == len(data_per_point) - 1 # Create a PerArraySequenceDict object using an existing dict object. 
data_per_point = DATA['data_per_point'] From 745108ad4e52d86cad75f0a66411f0c7ce2b19a6 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:13:05 +0200 Subject: [PATCH 488/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 4c0f32d0b9..f8a4ad515d 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -785,7 +785,7 @@ def test_tractogram_extend(self): (extender, True)): first_arg = t.copy() new_t = op(first_arg, t) - assert (new_t is first_arg) is in_place + assert (new_t is first_arg) == in_place assert_tractogram_equal(new_t[:len(t)], DATA['tractogram']) assert_tractogram_equal(new_t[len(t):], DATA['tractogram']) From c758df74c4be3f0f07b8509420c3368ca0c920cf Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:14:16 +0200 Subject: [PATCH 489/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index f8a4ad515d..b3706fe1c2 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -321,7 +321,7 @@ def test_per_array_sequence_dict_creation(self): assert_arrays_equal(data_dict[k], data_per_point[k]) del data_dict['fa'] - assert len(data_dict) == len(data_per_point)-1 + assert len(data_dict) == len(data_per_point) - 1 # Create a PerArraySequenceDict object using keyword arguments. data_per_point = DATA['data_per_point'] From d168423d7a5aaf91653e1e4167f9f50d21ab57d2 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:14:48 +0200 Subject: [PATCH 490/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index b3706fe1c2..071ef1d331 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -484,7 +484,7 @@ def test_tractogram_creation(self): DATA['data_per_point']) assert is_data_dict(tractogram.data_per_streamline) is True - assert is_data_dict(tractogram.data_per_point) is True + assert is_data_dict(tractogram.data_per_point) # Create a tractogram from another tractogram attributes. 
tractogram2 = Tractogram(tractogram.streamlines, From 1ad759b43081980044f80c29f7bb18f7e7172eaf Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:15:05 +0200 Subject: [PATCH 491/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 071ef1d331..9ce15a7729 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -420,7 +420,7 @@ def test_lazydict_creation(self): expected_keys = DATA['data_per_streamline_func'].keys() for data_dict in lazy_dicts: - assert is_lazy_dict(data_dict) is True + assert is_lazy_dict(data_dict) assert data_dict.keys() == expected_keys for k in data_dict.keys(): assert_array_equal(list(data_dict[k]), From 7bc83a72198270ec5df7878d5ac980d39061dd2f Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:15:26 +0200 Subject: [PATCH 492/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 9ce15a7729..72c405ed73 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -331,7 +331,7 @@ def test_per_array_sequence_dict_creation(self): assert_arrays_equal(data_dict[k], data_per_point[k]) del data_dict['fa'] - assert len(data_dict) == len(data_per_point)-1 + assert len(data_dict) == len(data_per_point) - 1 def test_getitem(self): total_nb_rows = DATA['tractogram'].streamlines.total_nb_rows From bfa7a3dc82cecdd91369cdaf25a4ef41ff6701b0 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:15:42 +0200 Subject: [PATCH 493/689] Update nibabel/streamlines/tests/test_tractogram.py Co-Authored-By: Chris Markiewicz --- nibabel/streamlines/tests/test_tractogram.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 72c405ed73..b44feae45f 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -483,7 +483,7 @@ def test_tractogram_creation(self): DATA['data_per_streamline'], DATA['data_per_point']) - assert is_data_dict(tractogram.data_per_streamline) is True + assert is_data_dict(tractogram.data_per_streamline) assert is_data_dict(tractogram.data_per_point) # Create a tractogram from another tractogram attributes. 
From 4fa17af5a6246ad0b5da875945c5943fe7484be3 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:23:08 +0200 Subject: [PATCH 494/689] Update test_array_sequence.py --- nibabel/streamlines/tests/test_array_sequence.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 9c1ac74b1e..c1429f8fc4 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -367,11 +367,8 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): for scalar in scalars: orig = arrseq.copy() seq = getattr(orig, op)(scalar) - - if inplace: - assert seq is orig - else: - assert seq is not orig + + assert (seq is orig) == inplace check_arr_seq(seq, [getattr(e, op)(scalar) for e in arrseq]) @@ -438,9 +435,9 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): # __itruediv__ is only valid with float arrseq. for scalar in SCALARS + ARRSEQS: + seq_int_cp = seq_int.copy() with pytest.raises(TypeError): - seq_int_cp = seq_int.copy() - seq_int_cp.__itruediv__(scalar) + seq_int_cp /= scalar # Bitwise operators for op in ("__lshift__", "__rshift__", "__or__", "__and__", "__xor__"): From 51016a783bbb90943ecaa6ab16a22efe23ea36d4 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 17:27:59 +0200 Subject: [PATCH 495/689] Update test_tractogram.py --- nibabel/streamlines/tests/test_tractogram.py | 37 +++++++++----------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index b44feae45f..2fa72e3514 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -603,20 +603,15 @@ def test_tractogram_copy(self): # Check we copied the data and not simply created new references. assert tractogram is not DATA['tractogram'] - assert tractogram.streamlines is \ - not DATA['tractogram'].streamlines - assert tractogram.data_per_streamline is \ - not DATA['tractogram'].data_per_streamline - assert tractogram.data_per_point is \ - not DATA['tractogram'].data_per_point + assert tractogram.streamlines is not DATA['tractogram'].streamlines + assert tractogram.data_per_streamline is not DATA['tractogram'].data_per_streamline + assert tractogram.data_per_point is not DATA['tractogram'].data_per_point for key in tractogram.data_per_streamline: - assert tractogram.data_per_streamline[key] is \ - not DATA['tractogram'].data_per_streamline[key] + assert tractogram.data_per_streamline[key] is not DATA['tractogram'].data_per_streamline[key] for key in tractogram.data_per_point: - assert tractogram.data_per_point[key] is \ - not DATA['tractogram'].data_per_point[key] + assert tractogram.data_per_point[key] is not DATA['tractogram'].data_per_point[key] # Check the values of the data are the same. assert_tractogram_equal(tractogram, DATA['tractogram']) @@ -818,8 +813,14 @@ def test_lazy_tractogram_creation(self): # function. 
with pytest.raises(TypeError): LazyTractogram(streamlines=streamlines) + + with pytest.raises(TypeError): LazyTractogram(data_per_point={"none": None}) + + with pytest.raises(TypeError): LazyTractogram(data_per_streamline=data_per_streamline) + + with pytest.raises(TypeError): LazyTractogram(streamlines=DATA['streamlines'], data_per_point=data_per_point) @@ -833,8 +834,8 @@ def test_lazy_tractogram_creation(self): DATA['data_per_streamline_func'], DATA['data_per_point_func']) - assert is_lazy_dict(tractogram.data_per_streamline) is True - assert is_lazy_dict(tractogram.data_per_point) is True + assert is_lazy_dict(tractogram.data_per_streamline) + assert is_lazy_dict(tractogram.data_per_point) [t for t in tractogram] # Force iteration through tractogram. assert len(tractogram) == len(DATA['streamlines']) @@ -1010,15 +1011,12 @@ def test_lazy_tractogram_copy(self): # When copying LazyTractogram, the generator function yielding # streamlines should stay the same. - assert tractogram._streamlines \ - is DATA['lazy_tractogram']._streamlines + assert tractogram._streamlines is DATA['lazy_tractogram']._streamlines # Copying LazyTractogram, creates new internal LazyDict objects, # but generator functions contained in it should stay the same. - assert tractogram._data_per_streamline \ - is not DATA['lazy_tractogram']._data_per_streamline - assert tractogram._data_per_point \ - is not DATA['lazy_tractogram']._data_per_point + assert tractogram._data_per_streamline is not DATA['lazy_tractogram']._data_per_streamline + assert tractogram._data_per_point is not DATA['lazy_tractogram']._data_per_point for key in tractogram.data_per_streamline: data = tractogram.data_per_streamline.store[key] @@ -1031,8 +1029,7 @@ def test_lazy_tractogram_copy(self): assert data is expected # The affine should be a copy. 
- assert tractogram._affine_to_apply \ - is not DATA['lazy_tractogram']._affine_to_apply + assert tractogram._affine_to_apply is not DATA['lazy_tractogram']._affine_to_apply assert_array_equal(tractogram._affine_to_apply, DATA['lazy_tractogram']._affine_to_apply) From bc0c67bad84ebe7902d33707b7e1ce4543195b76 Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 11:13:30 -0500 Subject: [PATCH 496/689] make sure cov is installed --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index fe6d91bdd3..34c8456a04 100644 --- a/setup.cfg +++ b/setup.cfg @@ -59,6 +59,7 @@ nosetests = coverage nose >=0.11 pytest + pytest-cov test = coverage nose >=0.11 From ab2e0b89929a4a0ecb8a614359f857bb98638c8b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:30:23 -0500 Subject: [PATCH 497/689] TEST: Fix import/indentation errors --- .../streamlines/tests/test_array_sequence.py | 2 +- nibabel/streamlines/tests/test_tractogram.py | 18 +++++++----------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index c1429f8fc4..e6a3af3e62 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -420,7 +420,7 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): with pytest.raises(TypeError): _test_binary(op, seq_int, [0.5], [], inplace=True) # int <-- float - with pytest.raises(TypeError): + with pytest.raises(TypeError): _test_binary(op, seq_int, [], [seq], inplace=True) # int <-- float diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 2fa72e3514..0ab3cb3106 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -7,7 +7,7 @@ from collections import defaultdict import pytest -from ....testing_pytest import assert_arrays_equal, clear_and_catch_warnings +from ...testing_pytest import assert_arrays_equal, clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal import pytest; pytestmark = pytest.mark.skip() @@ -632,7 +632,7 @@ def test_creating_invalid_tractogram(self): with pytest.raises(ValueError): Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + data_per_streamline={'properties': properties}) # Inconsistent dimension for a data_per_point. scalars = [[(1, 0, 0)]*1, @@ -650,7 +650,7 @@ def test_creating_invalid_tractogram(self): with pytest.raises(ValueError): Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + data_per_streamline={'properties': properties}) # Too many dimension for a data_per_streamline. properties = [np.array([[1.11], [1.22]], dtype="f4"), @@ -659,7 +659,7 @@ def test_creating_invalid_tractogram(self): with pytest.raises(ValueError): Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + data_per_streamline={'properties': properties}) def test_tractogram_apply_affine(self): tractogram = DATA['tractogram'].copy() @@ -813,16 +813,12 @@ def test_lazy_tractogram_creation(self): # function. 
with pytest.raises(TypeError): LazyTractogram(streamlines=streamlines) - with pytest.raises(TypeError): LazyTractogram(data_per_point={"none": None}) - - with pytest.raises(TypeError): + with pytest.raises(TypeError): LazyTractogram(data_per_streamline=data_per_streamline) - - with pytest.raises(TypeError): - LazyTractogram(streamlines=DATA['streamlines'], - data_per_point=data_per_point) + with pytest.raises(TypeError): + LazyTractogram(streamlines=DATA['streamlines'], data_per_point=data_per_point) # Empty `LazyTractogram` tractogram = LazyTractogram() From 5ec71fb768b7469a4fd9c55e3f71cb0e361d9bcd Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:41:38 -0500 Subject: [PATCH 498/689] ENH: Use generic unittest.SkipTest in missing optional_package --- nibabel/optpkg.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py index f87f64da9f..3590cd3c00 100644 --- a/nibabel/optpkg.py +++ b/nibabel/optpkg.py @@ -3,11 +3,6 @@ from distutils.version import LooseVersion from .tripwire import TripWire -if pkgutil.find_loader('nose'): - have_nose = True -else: - have_nose = False - def _check_pkg_version(pkg, min_version): # Default version checking function @@ -115,10 +110,8 @@ def optional_package(name, trip_msg=None, min_version=None): % (name, name, exc)) pkg = TripWire(trip_msg) - # TODO dj: no clue why is it needed... def setup_module(): - if have_nose: - import nose - raise nose.plugins.skip.SkipTest('No %s for these tests' - % name) + import unittest + raise unittest.SkipTest('No %s for these tests' % name) + return pkg, False, setup_module From f2a96be6fa76f197f237d5f0ca4480d4baed171e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:53:31 -0500 Subject: [PATCH 499/689] TEST: Cleanup test_array_sequence --- .../streamlines/tests/test_array_sequence.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index e6a3af3e62..0802d19b19 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -9,8 +9,6 @@ from ...testing_pytest import assert_arrays_equal from numpy.testing import assert_array_equal -import pytest; pytestmark = pytest.mark.skip() - from ..array_sequence import ArraySequence, is_array_sequence, concatenate @@ -37,9 +35,7 @@ def check_empty_arr_seq(seq): assert len(seq._lengths) == 0 # assert_equal(seq._data.ndim, 0) assert seq._data.ndim == 1 - - # TODO: Check assert_true - # assert_true(seq.common_shape == ()) + assert seq.common_shape == () def check_arr_seq(seq, arrays): @@ -68,9 +64,9 @@ def check_arr_seq(seq, arrays): def check_arr_seq_view(seq_view, seq): - assert seq_view._is_view is True + assert seq_view._is_view assert seq_view is not seq - assert (np.may_share_memory(seq_view._data, seq._data)) is True + assert np.may_share_memory(seq_view._data, seq._data) assert seq_view._offsets is not seq._offsets assert seq_view._lengths is not seq._lengths @@ -179,7 +175,6 @@ def test_arraysequence_append(self): element = generate_data(nb_arrays=1, common_shape=SEQ_DATA['seq'].common_shape*2, rng=SEQ_DATA['rng'])[0] - with pytest.raises(ValueError): seq.append(element) @@ -275,7 +270,6 @@ def test_arraysequence_getitem(self): # Test invalid indexing with pytest.raises(TypeError): SEQ_DATA['seq']['abc'] - #SEQ_DATA['seq'].abc # Get specific columns. 
seq_view = SEQ_DATA['seq'][:, 2] @@ -323,8 +317,7 @@ def test_arraysequence_setitem(self): # Setitem between array sequences with different number of sequences. seq = ArraySequence(np.arange(900).reshape((50,6,3))) with pytest.raises(ValueError): - seq.__setitem__(slice(0, 4), seq[5:10]) - + seq[0:4] = seq[5:10] # Setitem between array sequences with different amount of points. seq1 = ArraySequence(np.arange(10).reshape(5, 2)) @@ -341,7 +334,7 @@ def test_arraysequence_setitem(self): # Invalid index. with pytest.raises(TypeError): - seq.__setitem__(object(), None) + seq[object()] = None def test_arraysequence_operators(self): # Disable division per zero warnings. @@ -367,7 +360,6 @@ def _test_binary(op, arrseq, scalars, seqs, inplace=False): for scalar in scalars: orig = arrseq.copy() seq = getattr(orig, op)(scalar) - assert (seq is orig) == inplace check_arr_seq(seq, [getattr(e, op)(scalar) for e in arrseq]) From 3b6f32a78be96e069ee208bfe2727a4b83a022f3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:53:51 -0500 Subject: [PATCH 500/689] CI: Skip more nose tests --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index a0c19ff1f3..b6ef1a18a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -134,12 +134,15 @@ script: cd for_testing cp ../.coveragerc . nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \ + -I test_array_sequence \ + -I test_tractogram \ -I test_api_validators \ -I test_arrayproxy \ - -I test_arrayriters \ + -I test_arraywriters \ -I test_batteryrunners \ -I test_brikhead \ -I test_casting \ + -I test_cifti2io_header \ -I test_data \ -I test_deprecated \ -I test_deprecator \ From 49909dc35159b1974db7b9a3abb91b576155abd8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 11:59:13 -0500 Subject: [PATCH 501/689] TEST: Un-skip tractogram tests, style cleanups --- nibabel/streamlines/tests/test_tractogram.py | 31 +++++++------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 0ab3cb3106..16f1df2989 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -9,7 +9,6 @@ import pytest from ...testing_pytest import assert_arrays_equal, clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal -import pytest; pytestmark = pytest.mark.skip() from .. import tractogram as module_tractogram from ..tractogram import is_data_dict, is_lazy_dict @@ -236,7 +235,6 @@ def test_getitem(self): with pytest.raises(KeyError): sdict['invalid'] - #assert_raises(KeyError, sdict.__getitem__, 'invalid') # Test slicing and advanced indexing. for k, v in DATA['tractogram'].data_per_streamline.items(): @@ -339,7 +337,6 @@ def test_getitem(self): with pytest.raises(KeyError): sdict['invalid'] - #assert_raises(KeyError, sdict.__getitem__, 'invalid') # Test slicing and advanced indexing. 
for k, v in DATA['tractogram'].data_per_point.items(): @@ -509,8 +506,7 @@ def test_tractogram_creation(self): data_per_point = {'wrong_data': wrong_data} with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_point=data_per_point) + Tractogram(streamlines=DATA['streamlines'], data_per_point=data_per_point) # Inconsistent number of scalars between streamlines wrong_data = [[(1, 0, 0)]*1, @@ -519,8 +515,7 @@ def test_tractogram_creation(self): data_per_point = {'wrong_data': wrong_data} with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_point=data_per_point) + Tractogram(streamlines=DATA['streamlines'], data_per_point=data_per_point) def test_setting_affine_to_rasmm(self): tractogram = DATA['tractogram'].copy() @@ -542,7 +537,6 @@ def test_setting_affine_to_rasmm(self): with pytest.raises(ValueError): tractogram.affine_to_rasmm = affine[::2] - def test_tractogram_getitem(self): # Retrieve TractogramItem by their index. for i, t in enumerate(DATA['tractogram']): @@ -623,25 +617,22 @@ def test_creating_invalid_tractogram(self): [(0, 0, 1)]*3] # Last streamlines has 5 points. with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_point={'scalars':scalars}) + Tractogram(streamlines=DATA['streamlines'], data_per_point={'scalars': scalars}) # Not enough data_per_streamline for all streamlines. properties = [np.array([1.11, 1.22], dtype="f4"), np.array([3.11, 3.22], dtype="f4")] with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + Tractogram(streamlines=DATA['streamlines'], data_per_streamline={'properties': properties}) # Inconsistent dimension for a data_per_point. - scalars = [[(1, 0, 0)]*1, - [(0, 1)]*2, - [(0, 0, 1)]*5] + scalars = [[(1, 0, 0)] * 1, + [(0, 1)] * 2, + [(0, 0, 1)] * 5] with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_point={'scalars':scalars}) + Tractogram(streamlines=DATA['streamlines'], data_per_point={'scalars':scalars}) # Inconsistent dimension for a data_per_streamline. properties = [[1.11, 1.22], @@ -649,8 +640,7 @@ def test_creating_invalid_tractogram(self): [3.11, 3.22]] with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + Tractogram(streamlines=DATA['streamlines'], data_per_streamline={'properties': properties}) # Too many dimension for a data_per_streamline. 
properties = [np.array([[1.11], [1.22]], dtype="f4"), @@ -658,8 +648,7 @@ def test_creating_invalid_tractogram(self): np.array([[3.11], [3.22]], dtype="f4")] with pytest.raises(ValueError): - Tractogram(streamlines=DATA['streamlines'], - data_per_streamline={'properties': properties}) + Tractogram(streamlines=DATA['streamlines'], data_per_streamline={'properties': properties}) def test_tractogram_apply_affine(self): tractogram = DATA['tractogram'].copy() From 8fe1c2cd9e76a6c4b3f1f3f90fe723ea05b2861c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 13:51:53 -0500 Subject: [PATCH 502/689] TEST: Fix optpkg test --- nibabel/tests/test_optpkg.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 5603fc5127..a6b9571530 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -41,10 +41,10 @@ def test_basic(): # We never have package _not_a_package assert_bad('_not_a_package') - # setup_module imports nose, so make sure we don't disrupt that + # setup_module imports unittest, so make sure we don't disrupt that orig_import = builtins.__import__ def raise_Exception(*args, **kwargs): - if args[0] == 'nose': + if args[0] == 'unittest': return orig_import(*args, **kwargs) raise Exception( "non ImportError could be thrown by some malfunctioning module " From 4fbcb79fc88bc2cbc29eb0eb3fb751ce9d66f4dc Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 13:54:15 -0500 Subject: [PATCH 503/689] TEST: Style updates to test_analyze --- nibabel/tests/test_analyze.py | 60 ++++++++++++----------------------- 1 file changed, 20 insertions(+), 40 deletions(-) diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 9032f136c5..14bfb3b5e7 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -67,8 +67,7 @@ class TestAnalyzeHeader(_TestLabeledWrapStruct): def test_supported_types(self): hdr = self.header_class() - assert (self.supported_np_types == - supported_np_types(hdr)) + assert self.supported_np_types == supported_np_types(hdr) def get_bad_bb(self): # A value for the binary block that should raise an error @@ -117,12 +116,10 @@ def test_checks(self): hdr = hdr_t.copy() hdr['sizeof_hdr'] = 1 with suppress_warnings(): - assert (self._dxer(hdr) == 'sizeof_hdr should be ' + - str(self.sizeof_hdr)) + assert self._dxer(hdr) == 'sizeof_hdr should be ' + str(self.sizeof_hdr) hdr = hdr_t.copy() hdr['datatype'] = 0 - assert (self._dxer(hdr) == 'data code 0 not supported\n' - 'bitpix does not match datatype') + assert self._dxer(hdr) == 'data code 0 not supported\nbitpix does not match datatype' hdr = hdr_t.copy() hdr['bitpix'] = 0 assert self._dxer(hdr) == 'bitpix does not match datatype' @@ -144,11 +141,8 @@ def test_log_checks(self): fhdr, message, raiser = self.log_chk(hdr, 30) assert fhdr['sizeof_hdr'] == self.sizeof_hdr - assert (message == - 'sizeof_hdr should be {0}; set sizeof_hdr to {0}'.format( - self.sizeof_hdr)) - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + assert message == 'sizeof_hdr should be {0}; set sizeof_hdr to {0}'.format(self.sizeof_hdr) + pytest.raises(*raiser) # RGB datatype does not raise error hdr = HC() hdr.set_data_dtype('RGB') @@ -158,28 +152,22 @@ def test_log_checks(self): hdr['datatype'] = -1 # severity 40 with suppress_warnings(): fhdr, message, raiser = self.log_chk(hdr, 40) - assert (message == 'data code -1 not recognized; ' - 'not attempting fix') + assert message == 'data 
code -1 not recognized; not attempting fix' - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + pytest.raises(*raiser) # datatype not supported hdr['datatype'] = 255 # severity 40 fhdr, message, raiser = self.log_chk(hdr, 40) - assert (message == 'data code 255 not supported; ' - 'not attempting fix') - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + assert message == 'data code 255 not supported; not attempting fix' + pytest.raises(*raiser) # bitpix hdr = HC() hdr['datatype'] = 16 # float32 hdr['bitpix'] = 16 # severity 10 fhdr, message, raiser = self.log_chk(hdr, 10) assert fhdr['bitpix'] == 32 - assert (message == 'bitpix does not match datatype; ' - 'setting bitpix to match datatype') - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + assert message == 'bitpix does not match datatype; setting bitpix to match datatype' + pytest.raises(*raiser) def test_pixdim_log_checks(self): # pixdim positive @@ -188,17 +176,14 @@ def test_pixdim_log_checks(self): hdr['pixdim'][1] = -2 # severity 35 fhdr, message, raiser = self.log_chk(hdr, 35) assert fhdr['pixdim'][1] == 2 - assert (message == 'pixdim[1,2,3] should be positive; ' - 'setting to abs of pixdim values') - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + assert message == 'pixdim[1,2,3] should be positive; setting to abs of pixdim values' + pytest.raises(*raiser) hdr = HC() hdr['pixdim'][1] = 0 # severity 30 fhdr, message, raiser = self.log_chk(hdr, 30) assert fhdr['pixdim'][1] == 1 assert message == PIXDIM0_MSG - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + pytest.raises(*raiser) # both hdr = HC() hdr['pixdim'][1] = 0 # severity 30 @@ -206,12 +191,9 @@ def test_pixdim_log_checks(self): fhdr, message, raiser = self.log_chk(hdr, 35) assert fhdr['pixdim'][1] == 1 assert fhdr['pixdim'][2] == 2 - assert (message == 'pixdim[1,2,3] should be ' - 'non-zero and pixdim[1,2,3] should ' - 'be positive; setting 0 dims to 1 ' - 'and setting to abs of pixdim values') - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + assert message == ('pixdim[1,2,3] should be non-zero and pixdim[1,2,3] should be ' + 'positive; setting 0 dims to 1 and setting to abs of pixdim values') + pytest.raises(*raiser) def test_no_scaling_fixes(self): # Check we do not fix slope or intercept @@ -252,9 +234,8 @@ def test_logger_error(self): # Check log message appears in new logger imageglobals.logger = logger hdr.copy().check_fix() - assert (str_io.getvalue() == - 'bitpix does not match datatype; ' - 'setting bitpix to match datatype\n') + assert str_io.getvalue() == ('bitpix does not match datatype; ' + 'setting bitpix to match datatype\n') # Check that error_level in fact causes error to be raised imageglobals.error_level = 10 with pytest.raises(HeaderDataError): @@ -711,8 +692,7 @@ class TestAnalyzeImage(tsi.TestSpatialImage, tsi.MmapImageMixin): def test_supported_types(self): img = self.image_class(np.zeros((2, 3, 4)), np.eye(4)) - assert (self.supported_np_types == - supported_np_types(img)) + assert self.supported_np_types == supported_np_types(img) def test_default_header(self): # Check default header is as expected From 24cf57da6038cb03805a582639e154441e039af7 Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 21:55:41 +0200 Subject: [PATCH 504/689] TEST: test_trk to pytest --- nibabel/streamlines/tests/test_trk.py | 127 ++++++++++++++------------ 1 file changed, 69 insertions(+), 58 deletions(-) diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index 
8d4f01d766..f88631965e 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -7,11 +7,10 @@ from io import BytesIO -from nibabel.testing import data_path -from nibabel.testing import clear_and_catch_warnings, assert_arr_dict_equal -from nose.tools import assert_equal, assert_raises, assert_true +import pytest +from ...testing_pytest import data_path +from ...testing_pytest import clear_and_catch_warnings, assert_arr_dict_equal from numpy.testing import assert_array_equal -import pytest; pytestmark = pytest.mark.skip() from .test_tractogram import assert_tractogram_equal from ..tractogram import Tractogram @@ -24,7 +23,7 @@ DATA = {} -def setup(): +def setup_module(): global DATA DATA['empty_trk_fname'] = pjoin(data_path, "empty.trk") @@ -133,45 +132,51 @@ def test_load_file_with_wrong_information(self): trk_struct[Field.VOXEL_TO_RASMM] = np.zeros((4, 4)) with clear_and_catch_warnings(record=True, modules=[trk_module]) as w: trk = TrkFile.load(BytesIO(trk_bytes)) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, HeaderWarning)) - assert_true("identity" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, HeaderWarning) + assert "identity" in str(w[0].message) assert_array_equal(trk.affine, np.eye(4)) # Simulate a TRK where `vox_to_ras` is invalid. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct[Field.VOXEL_TO_RASMM] = np.diag([0, 0, 0, 1]) with clear_and_catch_warnings(record=True, modules=[trk_module]) as w: - assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes)) + with pytest.raises(HeaderError): + TrkFile.load(BytesIO(trk_bytes)) # Simulate a TRK file where `voxel_order` was not provided. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct[Field.VOXEL_ORDER] = b'' with clear_and_catch_warnings(record=True, modules=[trk_module]) as w: TrkFile.load(BytesIO(trk_bytes)) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, HeaderWarning)) - assert_true("LPS" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, HeaderWarning) + assert "LPS" in str(w[0].message) # Simulate a TRK file with an unsupported version. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct['version'] = 123 - assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes)) + with pytest.raises(HeaderError): + TrkFile.load(BytesIO(trk_bytes)) + # Simulate a TRK file with a wrong hdr_size. trk_struct, trk_bytes = self.trk_with_bytes() trk_struct['hdr_size'] = 1234 - assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes)) + with pytest.raises(HeaderError): + TrkFile.load(BytesIO(trk_bytes)) # Simulate a TRK file with a wrong scalar_name. trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname') trk_struct['scalar_name'][0, 0] = b'colors\x003\x004' - assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes)) + with pytest.raises(HeaderError): + TrkFile.load(BytesIO(trk_bytes)) # Simulate a TRK file with a wrong property_name. trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname') trk_struct['property_name'][0, 0] = b'colors\x003\x004' - assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes)) + with pytest.raises(HeaderError): + TrkFile.load(BytesIO(trk_bytes)) def test_load_trk_version_1(self): # Simulate and test a TRK (version 1). 
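The hunks above apply the conversion rule used mechanically throughout this series: nose's callable form `assert_raises(Err, func, *args)` becomes a `with pytest.raises(Err):` block around a direct call, and `assert_equal`/`assert_true` collapse to bare `assert` statements. A minimal sketch of the equivalence, using a hypothetical `parse` helper (not part of nibabel) that rejects bad input:

    import pytest

    def parse(value):
        # Stand-in for any call that is expected to fail on bad input.
        if value < 0:
            raise ValueError('negative input')
        return value

    # nose style (removed):  assert_raises(ValueError, parse, -1)
    # pytest style (added):
    with pytest.raises(ValueError):
        parse(-1)

    assert parse(3) == 3  # replaces assert_equal(parse(3), 3)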
@@ -184,9 +189,9 @@ def test_load_trk_version_1(self): trk_struct['version'] = 1 with clear_and_catch_warnings(record=True, modules=[trk_module]) as w: trk = TrkFile.load(BytesIO(trk_bytes)) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, HeaderWarning)) - assert_true("identity" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, HeaderWarning) + assert "identity" in str(w[0].message) assert_array_equal(trk.affine, np.eye(4)) assert_array_equal(trk.header['version'], 1) @@ -196,8 +201,8 @@ def test_load_complex_file_in_big_endian(self): # We use hdr_size as an indicator of little vs big endian. good_orders = '>' if sys.byteorder == 'little' else '>=' hdr_size = trk_struct['hdr_size'] - assert_true(hdr_size.dtype.byteorder in good_orders) - assert_equal(hdr_size, 1000) + assert hdr_size.dtype.byteorder in good_orders + assert hdr_size == 1000 for lazy_load in [False, True]: trk = TrkFile.load(DATA['complex_trk_big_endian_fname'], @@ -206,7 +211,7 @@ def test_load_complex_file_in_big_endian(self): def test_tractogram_file_properties(self): trk = TrkFile.load(DATA['simple_trk_fname']) - assert_equal(trk.streamlines, trk.tractogram.streamlines) + assert trk.streamlines == trk.tractogram.streamlines assert_array_equal(trk.affine, trk.header[Field.VOXEL_TO_RASMM]) def test_write_empty_file(self): @@ -224,8 +229,7 @@ def test_write_empty_file(self): assert_tractogram_equal(new_trk.tractogram, new_trk_orig.tractogram) trk_file.seek(0, os.SEEK_SET) - assert_equal(trk_file.read(), - open(DATA['empty_trk_fname'], 'rb').read()) + assert trk_file.read() == open(DATA['empty_trk_fname'], 'rb').read() def test_write_simple_file(self): tractogram = Tractogram(DATA['streamlines'], @@ -243,8 +247,7 @@ def test_write_simple_file(self): assert_tractogram_equal(new_trk.tractogram, new_trk_orig.tractogram) trk_file.seek(0, os.SEEK_SET) - assert_equal(trk_file.read(), - open(DATA['simple_trk_fname'], 'rb').read()) + assert trk_file.read() == open(DATA['simple_trk_fname'], 'rb').read() def test_write_complex_file(self): # With scalars @@ -293,8 +296,7 @@ def test_write_complex_file(self): assert_tractogram_equal(new_trk.tractogram, new_trk_orig.tractogram) trk_file.seek(0, os.SEEK_SET) - assert_equal(trk_file.read(), - open(DATA['complex_trk_fname'], 'rb').read()) + assert trk_file.read() == open(DATA['complex_trk_fname'], 'rb').read() def test_load_write_file(self): for fname in [DATA['empty_trk_fname'], @@ -329,8 +331,7 @@ def test_load_write_LPS_file(self): assert_tractogram_equal(new_trk.tractogram, new_trk_orig.tractogram) trk_file.seek(0, os.SEEK_SET) - assert_equal(trk_file.read(), - open(DATA['standard_LPS_trk_fname'], 'rb').read()) + assert trk_file.read() == open(DATA['standard_LPS_trk_fname'], 'rb').read() # Test writing a file where the header is missing the # Field.VOXEL_ORDER. @@ -353,8 +354,7 @@ def test_load_write_LPS_file(self): assert_tractogram_equal(new_trk.tractogram, new_trk_orig.tractogram) trk_file.seek(0, os.SEEK_SET) - assert_equal(trk_file.read(), - open(DATA['standard_LPS_trk_fname'], 'rb').read()) + assert trk_file.read() == open(DATA['standard_LPS_trk_fname'], 'rb').read() def test_write_optional_header_fields(self): # The TRK file format doesn't support additional header fields. 
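Further down this patch, `test_encode_names` and `test_decode_names` exercise the TRK convention of packing a small value count into a fixed-width name field (the tests use a width of 10): a count above one is appended after a NUL byte, and the field is right-padded with NULs. A rough sketch of an encoder with that behavior — `encode` here is a stand-in written only for illustration, not nibabel's `encode_value_in_name` itself:

    def encode(value, name, width=10):
        # Counts of 0 or 1 are implied by a bare name; larger counts
        # follow the name after a NUL separator.
        field = name if value <= 1 else '%s\x00%d' % (name, value)
        if len(field) > width:
            raise ValueError('name too long for field width')
        return field.encode('latin1').ljust(width, b'\x00')

    assert encode(8, 'foo') == b'foo\x008' + b'\x00' * 5
    assert encode(40, 'foobar') == b'foobar\x0040\x00'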
@@ -368,7 +368,7 @@ def test_write_optional_header_fields(self):
         trk_file = BytesIO()
         trk = TrkFile(tractogram)
         trk.save(trk_file)
         trk_file.seek(0, os.SEEK_SET)

         new_trk = TrkFile.load(trk_file)
-        assert_true("extra" not in new_trk.header)
+        assert "extra" not in new_trk.header

     def test_write_too_many_scalars_and_properties(self):
         # TRK supports up to 10 data_per_point.
@@ -396,7 +396,8 @@ def test_write_too_many_scalars_and_properties(self):
                                  affine_to_rasmm=np.eye(4))

             trk = TrkFile(tractogram)
-            assert_raises(ValueError, trk.save, BytesIO())
+            with pytest.raises(ValueError):
+                trk.save(BytesIO())

         # TRK supports up to 10 data_per_streamline.
         data_per_streamline = {}
@@ -422,7 +423,8 @@ def test_write_too_many_scalars_and_properties(self):
                                      data_per_streamline=data_per_streamline)

             trk = TrkFile(tractogram)
-            assert_raises(ValueError, trk.save, BytesIO())
+            with pytest.raises(ValueError):
+                trk.save(BytesIO())

     def test_write_scalars_and_properties_name_too_long(self):
         # TRK supports data_per_point name up to 20 characters.
@@ -438,7 +440,8 @@ def test_write_scalars_and_properties_name_too_long(self):

             trk = TrkFile(tractogram)
             if nb_chars > 18:
-                assert_raises(ValueError, trk.save, BytesIO())
+                with pytest.raises(ValueError):
+                    trk.save(BytesIO())
             else:
                 trk.save(BytesIO())

@@ -449,7 +452,8 @@ def test_write_scalars_and_properties_name_too_long(self):

             trk = TrkFile(tractogram)
             if nb_chars > 20:
-                assert_raises(ValueError, trk.save, BytesIO())
+                with pytest.raises(ValueError):
+                    trk.save(BytesIO())
             else:
                 trk.save(BytesIO())

@@ -466,7 +470,8 @@ def test_write_scalars_and_properties_name_too_long(self):

             trk = TrkFile(tractogram)
             if nb_chars > 18:
-                assert_raises(ValueError, trk.save, BytesIO())
+                with pytest.raises(ValueError):
+                    trk.save(BytesIO())
             else:
                 trk.save(BytesIO())

@@ -477,7 +482,8 @@ def test_write_scalars_and_properties_name_too_long(self):

             trk = TrkFile(tractogram)
             if nb_chars > 20:
-                assert_raises(ValueError, trk.save, BytesIO())
+                with pytest.raises(ValueError):
+                    trk.save(BytesIO())
             else:
                 trk.save(BytesIO())

@@ -499,32 +505,37 @@ def test_header_read_restore(self):
         hdr_from_fname['_offset_data'] += hdr_pos  # Correct for start position
         assert_arr_dict_equal(TrkFile._read_header(bio), hdr_from_fname)
         # Check fileobject file position has not changed
-        assert_equal(bio.tell(), hdr_pos)
+        assert bio.tell() == hdr_pos


 def test_encode_names():
     # Test function for encoding numbers into property names
     b0 = b'\x00'
-    assert_equal(encode_value_in_name(0, 'foo', 10),
-                 b'foo' + b0 * 7)
-    assert_equal(encode_value_in_name(1, 'foo', 10),
-                 b'foo' + b0 * 7)
-    assert_equal(encode_value_in_name(8, 'foo', 10),
-                 b'foo' + b0 + b'8' + b0 * 5)
-    assert_equal(encode_value_in_name(40, 'foobar', 10),
-                 b'foobar' + b0 + b'40' + b0)
-    assert_equal(encode_value_in_name(1, 'foobarbazz', 10), b'foobarbazz')
-    assert_raises(ValueError, encode_value_in_name, 1, 'foobarbazzz', 10)
-    assert_raises(ValueError, encode_value_in_name, 2, 'foobarbaz', 10)
-    assert_equal(encode_value_in_name(2, 'foobarba', 10), b'foobarba\x002')
+    assert encode_value_in_name(0, 'foo', 10) == b'foo' + b0 * 7
+    assert encode_value_in_name(1, 'foo', 10) == b'foo' + b0 * 7
+    assert encode_value_in_name(8, 'foo', 10) == b'foo' + b0 + b'8' + b0 * 5
+    assert encode_value_in_name(40, 'foobar', 10) == b'foobar' + b0 + b'40' + b0
+    assert encode_value_in_name(1, 'foobarbazz', 10) == b'foobarbazz'
+
+    with pytest.raises(ValueError):
+        encode_value_in_name(1, 'foobarbazzz', 10)
+
+    with pytest.raises(ValueError):
+        encode_value_in_name(2, 'foobarbaz', 10)
+
+    assert encode_value_in_name(2, 'foobarba', 10) == 
b'foobarba\x002' def test_decode_names(): # Test function for decoding name string into name, number b0 = b'\x00' - assert_equal(decode_value_from_name(b''), ('', 0)) - assert_equal(decode_value_from_name(b'foo' + b0 * 7), ('foo', 1)) - assert_equal(decode_value_from_name(b'foo\x008' + b0 * 5), ('foo', 8)) - assert_equal(decode_value_from_name(b'foobar\x0010\x00'), ('foobar', 10)) - assert_raises(ValueError, decode_value_from_name, b'foobar\x0010\x01') - assert_raises(HeaderError, decode_value_from_name, b'foo\x0010\x00111') + assert decode_value_from_name(b'') == ('', 0) + assert decode_value_from_name(b'foo' + b0 * 7) == ('foo', 1) + assert decode_value_from_name(b'foo\x008' + b0 * 5) == ('foo', 8) + assert decode_value_from_name(b'foobar\x0010\x00') == ('foobar', 10) + + with pytest.raises(ValueError): + decode_value_from_name(b'foobar\x0010\x01') + + with pytest.raises(HeaderError): + decode_value_from_name(b'foo\x0010\x00111') From 3a2b0d10629146fc73e1729765caf526e244dd5b Mon Sep 17 00:00:00 2001 From: robbisg Date: Tue, 4 Feb 2020 21:55:56 +0200 Subject: [PATCH 505/689] TEST: test_tck to pytest --- nibabel/streamlines/tests/test_tck.py | 58 ++++++++++++++------------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/nibabel/streamlines/tests/test_tck.py b/nibabel/streamlines/tests/test_tck.py index 573bed02d3..fb29776f75 100644 --- a/nibabel/streamlines/tests/test_tck.py +++ b/nibabel/streamlines/tests/test_tck.py @@ -14,17 +14,15 @@ from .. import tck as tck_module from ..tck import TckFile -import pytest; pytestmark = pytest.mark.skip() - -from nose.tools import assert_equal, assert_raises, assert_true +import pytest from numpy.testing import assert_array_equal -from nibabel.testing import data_path, clear_and_catch_warnings +from ...testing_pytest import data_path, clear_and_catch_warnings from .test_tractogram import assert_tractogram_equal DATA = {} -def setup(): +def setup_module(): global DATA DATA['empty_tck_fname'] = pjoin(data_path, "empty.tck") @@ -71,8 +69,8 @@ def test_load_matlab_nan_file(self): for lazy_load in [False, True]: tck = TckFile.load(DATA['matlab_nan_tck_fname'], lazy_load=lazy_load) streamlines = list(tck.tractogram.streamlines) - assert_equal(len(streamlines), 1) - assert_equal(streamlines[0].shape, (108, 3)) + assert len(streamlines) == 1 + assert streamlines[0].shape == (108, 3) def test_writeable_data(self): data = DATA['simple_tractogram'] @@ -82,7 +80,7 @@ def test_writeable_data(self): for actual, expected_tgi in zip(tck.streamlines, data): assert_array_equal(actual, expected_tgi.streamline) # Test we can write to arrays - assert_true(actual.flags.writeable) + assert actual.flags.writeable actual[0, 0] = 99 def test_load_simple_file_in_big_endian(self): @@ -90,7 +88,7 @@ def test_load_simple_file_in_big_endian(self): tck = TckFile.load(DATA['simple_tck_big_endian_fname'], lazy_load=lazy_load) assert_tractogram_equal(tck.tractogram, DATA['simple_tractogram']) - assert_equal(tck.header['datatype'], 'Float32BE') + assert tck.header['datatype'] == 'Float32BE' def test_load_file_with_wrong_information(self): tck_file = open(DATA['simple_tck_fname'], 'rb').read() @@ -98,12 +96,15 @@ def test_load_file_with_wrong_information(self): # Simulate a TCK file where `datatype` has not the right endianness. 
new_tck_file = tck_file.replace(asbytes("Float32LE"), asbytes("Float32BE")) - assert_raises(DataError, TckFile.load, BytesIO(new_tck_file)) + + with pytest.raises(DataError): + TckFile.load(BytesIO(new_tck_file)) # Simulate a TCK file with unsupported `datatype`. new_tck_file = tck_file.replace(asbytes("Float32LE"), asbytes("int32")) - assert_raises(HeaderError, TckFile.load, BytesIO(new_tck_file)) + with pytest.raises(HeaderError): + TckFile.load(BytesIO(new_tck_file)) # Simulate a TCK file with no `datatype` field. new_tck_file = tck_file.replace(b"datatype: Float32LE\n", b"") @@ -111,24 +112,25 @@ def test_load_file_with_wrong_information(self): new_tck_file = new_tck_file.replace(b"file: . 67\n", b"file: . 47\n") with clear_and_catch_warnings(record=True, modules=[tck_module]) as w: tck = TckFile.load(BytesIO(new_tck_file)) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, HeaderWarning)) - assert_true("Missing 'datatype'" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, HeaderWarning) + assert "Missing 'datatype'" in str(w[0].message) assert_array_equal(tck.header['datatype'], "Float32LE") # Simulate a TCK file with no `file` field. new_tck_file = tck_file.replace(b"\nfile: . 67", b"") with clear_and_catch_warnings(record=True, modules=[tck_module]) as w: tck = TckFile.load(BytesIO(new_tck_file)) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, HeaderWarning)) - assert_true("Missing 'file'" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, HeaderWarning) + assert "Missing 'file'" in str(w[0].message) assert_array_equal(tck.header['file'], ". 56") # Simulate a TCK file with `file` field pointing to another file. new_tck_file = tck_file.replace(b"file: . 67\n", b"file: dummy.mat 75\n") - assert_raises(HeaderError, TckFile.load, BytesIO(new_tck_file)) + with pytest.raises(HeaderError): + TckFile.load(BytesIO(new_tck_file)) # Simulate a TCK file which is missing a streamline delimiter. eos = TckFile.FIBER_DELIMITER.tostring() @@ -139,11 +141,13 @@ def test_load_file_with_wrong_information(self): buffer_size = 1. / 1024**2 # 1 bytes hdr = TckFile._read_header(BytesIO(new_tck_file)) tck_reader = TckFile._read(BytesIO(new_tck_file), hdr, buffer_size) - assert_raises(DataError, list, tck_reader) + with pytest.raises(DataError): + list(tck_reader) # Simulate a TCK file which is missing the end-of-file delimiter. new_tck_file = tck_file[:-len(eof)] - assert_raises(DataError, TckFile.load, BytesIO(new_tck_file)) + with pytest.raises(DataError): + TckFile.load(BytesIO(new_tck_file)) def test_write_empty_file(self): tractogram = Tractogram(affine_to_rasmm=np.eye(4)) @@ -160,8 +164,7 @@ def test_write_empty_file(self): assert_tractogram_equal(new_tck.tractogram, new_tck_orig.tractogram) tck_file.seek(0, os.SEEK_SET) - assert_equal(tck_file.read(), - open(DATA['empty_tck_fname'], 'rb').read()) + assert tck_file.read() == open(DATA['empty_tck_fname'], 'rb').read() def test_write_simple_file(self): tractogram = Tractogram(DATA['streamlines'], @@ -179,17 +182,18 @@ def test_write_simple_file(self): assert_tractogram_equal(new_tck.tractogram, new_tck_orig.tractogram) tck_file.seek(0, os.SEEK_SET) - assert_equal(tck_file.read(), - open(DATA['simple_tck_fname'], 'rb').read()) + assert tck_file.read() == open(DATA['simple_tck_fname'], 'rb').read() # TCK file containing not well formatted entries in its header. 
tck_file = BytesIO() tck = TckFile(tractogram) tck.header['new_entry'] = 'value\n' # \n not allowed - assert_raises(HeaderError, tck.save, tck_file) + with pytest.raises(HeaderError): + tck.save(tck_file) tck.header['new_entry'] = 'val:ue' # : not allowed - assert_raises(HeaderError, tck.save, tck_file) + with pytest.raises(HeaderError): + tck.save(tck_file) def test_load_write_file(self): for fname in [DATA['empty_tck_fname'], @@ -204,7 +208,7 @@ def test_load_write_file(self): # Check that the written file is the same as the one read. tck_file.seek(0, os.SEEK_SET) - assert_equal(tck_file.read(), open(fname, 'rb').read()) + assert tck_file.read() == open(fname, 'rb').read() # Save tractogram that has an affine_to_rasmm. for lazy_load in [False, True]: From d79af8b9a800204731339c6314268aa005f978ea Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 15:26:13 -0500 Subject: [PATCH 506/689] pytest port --- nibabel/gifti/tests/test_gifti.py | 187 +++++++++++++++--------------- 1 file changed, 93 insertions(+), 94 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 167e56d6cf..8a0b3d327d 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -16,7 +16,7 @@ from numpy.testing import (assert_array_almost_equal, assert_array_equal) -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +import pytest from nibabel.testing import clear_and_catch_warnings, test_data from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6) @@ -35,7 +35,7 @@ def test_agg_data(): func_data = np.column_stack(tuple(da.data for da in func_da)) shape_data = shape_gii_img.get_arrays_from_intent('shape')[0].data - assert_equal(surf_gii_img.agg_data(), (point_data, triangle_data)) + assert surf_gii_img.agg_data() == (point_data, triangle_data) assert_array_equal(func_gii_img.agg_data(), func_data) assert_array_equal(shape_gii_img.agg_data(), shape_data) @@ -44,45 +44,45 @@ def test_agg_data(): assert_array_equal(func_gii_img.agg_data('time series'), func_data) assert_array_equal(shape_gii_img.agg_data('shape'), shape_data) - assert_equal(surf_gii_img.agg_data('time series'), ()) - assert_equal(func_gii_img.agg_data('triangle'), ()) - assert_equal(shape_gii_img.agg_data('pointset'), ()) + assert surf_gii_img.agg_data('time series') == () + assert func_gii_img.agg_data('triangle') == () + assert shape_gii_img.agg_data('pointset') == () - assert_equal(surf_gii_img.agg_data(('pointset', 'triangle')), (point_data, triangle_data)) - assert_equal(surf_gii_img.agg_data(('triangle', 'pointset')), (triangle_data, point_data)) + assert surf_gii_img.agg_data(('pointset', 'triangle')) == (point_data, triangle_data) + assert surf_gii_img.agg_data(('triangle', 'pointset')) == (triangle_data, point_data) def test_gifti_image(): # Check that we're not modifying the default empty list in the default # arguments. 
gi = GiftiImage() - assert_equal(gi.darrays, []) - assert_equal(gi.meta.metadata, {}) - assert_equal(gi.labeltable.labels, []) + assert gi.darrays == [] + assert gi.meta.metadata == {} + assert gi.labeltable.labels == [] arr = np.zeros((2, 3)) gi.darrays.append(arr) # Now check we didn't overwrite the default arg gi = GiftiImage() - assert_equal(gi.darrays, []) + assert gi.darrays == [] # Test darrays / numDA gi = GiftiImage() - assert_equal(gi.numDA, 0) + assert gi.numDA == 0 # Test from numpy numeric array data = np.random.random((5,)) da = GiftiDataArray(data) gi.add_gifti_data_array(da) - assert_equal(gi.numDA, 1) + assert gi.numDA == 1 assert_array_equal(gi.darrays[0].data, data) # Test removing gi.remove_gifti_data_array(0) - assert_equal(gi.numDA, 0) + assert gi.numDA == 0 # Remove from empty gi = GiftiImage() gi.remove_gifti_data_array_by_intent(0) - assert_equal(gi.numDA, 0) + assert gi.numDA == 0 # Remove one gi = GiftiImage() @@ -90,113 +90,112 @@ def test_gifti_image(): gi.add_gifti_data_array(da) gi.remove_gifti_data_array_by_intent(3) - assert_equal(gi.numDA, 1, "data array should exist on 'missed' remove") + assert gi.numDA == 1, "data array should exist on 'missed' remove" gi.remove_gifti_data_array_by_intent(da.intent) - assert_equal(gi.numDA, 0) + assert gi.numDA == 0 def test_gifti_image_bad_inputs(): img = GiftiImage() # Try to set a non-data-array - assert_raises(TypeError, img.add_gifti_data_array, 'not-a-data-array') + pytest.raises(TypeError, img.add_gifti_data_array, 'not-a-data-array') # Try to set to non-table def assign_labeltable(val): img.labeltable = val - assert_raises(TypeError, assign_labeltable, 'not-a-table') + pytest.raises(TypeError, assign_labeltable, 'not-a-table') # Try to set to non-table def assign_metadata(val): img.meta = val - assert_raises(TypeError, assign_metadata, 'not-a-meta') + pytest.raises(TypeError, assign_metadata, 'not-a-meta') def test_dataarray_empty(): # Test default initialization of DataArray null_da = GiftiDataArray() - assert_equal(null_da.data, None) - assert_equal(null_da.intent, 0) - assert_equal(null_da.datatype, 0) - assert_equal(null_da.encoding, 3) - assert_equal(null_da.endian, 2 if sys.byteorder == 'little' else 1) - assert_equal(null_da.coordsys.dataspace, 0) - assert_equal(null_da.coordsys.xformspace, 0) + assert null_da.data is None + assert null_da.intent == 0 + assert null_da.datatype == 0 + assert null_da.encoding == 3 + assert null_da.endian == (2 if sys.byteorder == 'little' else 1) + assert null_da.coordsys.dataspace == 0 + assert null_da.coordsys.xformspace == 0 assert_array_equal(null_da.coordsys.xform, np.eye(4)) - assert_equal(null_da.ind_ord, 1) - assert_equal(null_da.meta.metadata, {}) - assert_equal(null_da.ext_fname, '') - assert_equal(null_da.ext_offset, 0) + assert null_da.ind_ord == 1 + assert null_da.meta.metadata == {} + assert null_da.ext_fname == '' + assert null_da.ext_offset == 0 def test_dataarray_init(): # Test non-default dataarray initialization gda = GiftiDataArray # shortcut - assert_equal(gda(None).data, None) + assert gda(None).data is None arr = np.arange(12, dtype=np.float32).reshape((3, 4)) assert_array_equal(gda(arr).data, arr) # Intents - assert_raises(KeyError, gda, intent=1) # Invalid code - assert_raises(KeyError, gda, intent='not an intent') # Invalid string - assert_equal(gda(intent=2).intent, 2) - assert_equal(gda(intent='correlation').intent, 2) - assert_equal(gda(intent='NIFTI_INTENT_CORREL').intent, 2) + pytest.raises(KeyError, gda, intent=1) # Invalid code + 
pytest.raises(KeyError, gda, intent='not an intent') # Invalid string + assert gda(intent=2).intent == 2 + assert gda(intent='correlation').intent == 2 + assert gda(intent='NIFTI_INTENT_CORREL').intent == 2 # Datatype - assert_equal(gda(datatype=2).datatype, 2) - assert_equal(gda(datatype='uint8').datatype, 2) - assert_raises(KeyError, gda, datatype='not_datatype') + assert gda(datatype=2).datatype == 2 + assert gda(datatype='uint8').datatype == 2 + pytest.raises(KeyError, gda, datatype='not_datatype') # Float32 datatype comes from array if datatype not set - assert_equal(gda(arr).datatype, 16) + assert gda(arr).datatype == 16 # Can be overriden by init - assert_equal(gda(arr, datatype='uint8').datatype, 2) + assert gda(arr, datatype='uint8').datatype == 2 # Encoding - assert_equal(gda(encoding=1).encoding, 1) - assert_equal(gda(encoding='ASCII').encoding, 1) - assert_equal(gda(encoding='GIFTI_ENCODING_ASCII').encoding, 1) - assert_raises(KeyError, gda, encoding='not an encoding') + assert gda(encoding=1).encoding == 1 + assert gda(encoding='ASCII').encoding == 1 + assert gda(encoding='GIFTI_ENCODING_ASCII').encoding == 1 + pytest.raises(KeyError, gda, encoding='not an encoding') # Endian - assert_equal(gda(endian=1).endian, 1) - assert_equal(gda(endian='big').endian, 1) - assert_equal(gda(endian='GIFTI_ENDIAN_BIG').endian, 1) - assert_raises(KeyError, gda, endian='not endian code') + assert gda(endian=1).endian == 1 + assert gda(endian='big').endian == 1 + assert gda(endian='GIFTI_ENDIAN_BIG').endian == 1 + pytest.raises(KeyError, gda, endian='not endian code') # CoordSys aff = np.diag([2, 3, 4, 1]) cs = GiftiCoordSystem(1, 2, aff) da = gda(coordsys=cs) - assert_equal(da.coordsys.dataspace, 1) - assert_equal(da.coordsys.xformspace, 2) + assert da.coordsys.dataspace == 1 + assert da.coordsys.xformspace == 2 assert_array_equal(da.coordsys.xform, aff) # Ordering - assert_equal(gda(ordering=2).ind_ord, 2) - assert_equal(gda(ordering='F').ind_ord, 2) - assert_equal(gda(ordering='ColumnMajorOrder').ind_ord, 2) - assert_raises(KeyError, gda, ordering='not an ordering') + assert gda(ordering=2).ind_ord == 2 + assert gda(ordering='F').ind_ord == 2 + assert gda(ordering='ColumnMajorOrder').ind_ord == 2 + pytest.raises(KeyError, gda, ordering='not an ordering') # metadata meta_dict=dict(one=1, two=2) - assert_equal(gda(meta=GiftiMetaData.from_dict(meta_dict)).meta.metadata, - meta_dict) - assert_equal(gda(meta=meta_dict).meta.metadata, meta_dict) - assert_equal(gda(meta=None).meta.metadata, {}) + assert gda(meta=GiftiMetaData.from_dict(meta_dict)).meta.metadata == meta_dict + assert gda(meta=meta_dict).meta.metadata == meta_dict + assert gda(meta=None).meta.metadata == {} # ext_fname and ext_offset - assert_equal(gda(ext_fname='foo').ext_fname, 'foo') - assert_equal(gda(ext_offset=12).ext_offset, 12) + assert gda(ext_fname='foo').ext_fname == 'foo' + assert gda(ext_offset=12).ext_offset == 12 def test_dataarray_from_array(): with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) da = GiftiDataArray.from_array(np.ones((3, 4))) - assert_equal(len(w), 1) + assert len(w) == 1 for dt_code in data_type_codes.value_set(): data_type = data_type_codes.type[dt_code] if data_type is np.void: # not supported continue arr = np.zeros((10, 3), dtype=data_type) da = GiftiDataArray.from_array(arr, 'triangle') - assert_equal(da.datatype, data_type_codes[arr.dtype]) + assert da.datatype == data_type_codes[arr.dtype] bs_arr = arr.byteswap().newbyteorder() da = 
GiftiDataArray.from_array(bs_arr, 'triangle') - assert_equal(da.datatype, data_type_codes[arr.dtype]) + assert da.datatype == data_type_codes[arr.dtype] def test_to_xml_open_close_deprecations(): @@ -204,35 +203,35 @@ def test_to_xml_open_close_deprecations(): da = GiftiDataArray(np.ones((1,)), 'triangle') with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) - assert_true(isinstance(da.to_xml_open(), str)) - assert_equal(len(w), 1) + assert isinstance(da.to_xml_open(), str) + assert len(w) == 1 with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) - assert_true(isinstance(da.to_xml_close(), str)) - assert_equal(len(w), 1) + assert isinstance(da.to_xml_close(), str) + assert len(w) == 1 def test_num_dim_deprecation(): da = GiftiDataArray(np.ones((2, 3, 4))) # num_dim is property, set automatically from len(da.dims) - assert_equal(da.num_dim, 3) + assert da.num_dim == 3 with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) # OK setting num_dim to correct value, but raises DeprecationWarning da.num_dim = 3 - assert_equal(len(w), 1) + assert len(w) == 1 # Any other value gives a ValueError - assert_raises(ValueError, setattr, da, 'num_dim', 4) + pytest.raises(ValueError, setattr, da, 'num_dim', 4) def test_labeltable(): img = GiftiImage() - assert_equal(len(img.labeltable.labels), 0) + assert len(img.labeltable.labels) == 0 new_table = GiftiLabelTable() new_table.labels += ['test', 'me'] img.labeltable = new_table - assert_equal(len(img.labeltable.labels), 2) + assert len(img.labeltable.labels) == 2 # Test deprecations with clear_and_catch_warnings() as w: @@ -240,23 +239,23 @@ def test_labeltable(): newer_table = GiftiLabelTable() newer_table.labels += ['test', 'me', 'again'] img.set_labeltable(newer_table) - assert_equal(len(w), 1) - assert_equal(len(img.get_labeltable().labels), 3) - assert_equal(len(w), 2) + assert len(w) == 1 + assert len(img.get_labeltable().labels) == 3 + assert len(w) == 2 def test_metadata(): nvpair = GiftiNVPairs('key', 'value') md = GiftiMetaData(nvpair=nvpair) - assert_equal(md.data[0].name, 'key') - assert_equal(md.data[0].value, 'value') + assert md.data[0].name == 'key' + assert md.data[0].value == 'value' # Test deprecation with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) - assert_equal(md.get_metadata(), dict(key='value')) - assert_equal(len(w), 1) - assert_equal(len(GiftiDataArray().get_metadata()), 0) - assert_equal(len(w), 2) + assert md.get_metadata() == dict(key='value') + assert len(w) == 1 + assert len(GiftiDataArray().get_metadata()) == 0 + assert len(w) == 2 def test_gifti_label_rgba(): @@ -267,31 +266,31 @@ def test_gifti_label_rgba(): assert_array_equal(rgba, gl1.rgba) gl1.red = 2 * gl1.red - assert_false(np.allclose(rgba, gl1.rgba)) # don't just store the list! + assert not np.allclose(rgba, gl1.rgba) # don't just store the list! gl2 = GiftiLabel() gl2.rgba = rgba assert_array_equal(rgba, gl2.rgba) gl2.blue = 2 * gl2.blue - assert_false(np.allclose(rgba, gl2.rgba)) # don't just store the list! + assert not np.allclose(rgba, gl2.rgba) # don't just store the list! 
def assign_rgba(gl, val): gl.rgba = val gl3 = GiftiLabel(**kwargs) - assert_raises(ValueError, assign_rgba, gl3, rgba[:2]) - assert_raises(ValueError, assign_rgba, gl3, rgba.tolist() + rgba.tolist()) + pytest.raises(ValueError, assign_rgba, gl3, rgba[:2]) + pytest.raises(ValueError, assign_rgba, gl3, rgba.tolist() + rgba.tolist()) # Test deprecation with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) - assert_equal(kwargs['red'], gl3.get_rgba()[0]) - assert_equal(len(w), 1) + assert kwargs['red'] == gl3.get_rgba()[0] + assert len(w) == 1 # Test default value gl4 = GiftiLabel() - assert_equal(len(gl4.rgba), 4) - assert_true(np.all([elem is None for elem in gl4.rgba])) + assert len(gl4.rgba) == 4 + assert np.all([elem is None for elem in gl4.rgba]) def test_print_summary(): @@ -304,7 +303,7 @@ def test_print_summary(): def test_gifti_coord(): from ..gifti import GiftiCoordSystem gcs = GiftiCoordSystem() - assert_true(gcs.xform is not None) + assert gcs.xform is not None # Smoke test gcs.xform = None @@ -316,7 +315,7 @@ def test_data_tag_deprecated(): with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) data_tag(np.array([]), 'ASCII', '%i', 1) - assert_equal(len(w), 1) + assert len(w) == 1 def test_gifti_round_trip(): @@ -443,5 +442,5 @@ def test_darray_dtype_coercion_failures(): gii = GiftiImage(darrays=[da]) gii_copy = GiftiImage.from_bytes(gii.to_bytes()) da_copy = gii_copy.darrays[0] - assert_equal(np.dtype(da_copy.data.dtype), np.dtype(darray_dtype)) + assert np.dtype(da_copy.data.dtype) == np.dtype(darray_dtype) assert_array_equal(da_copy.data, da.data) From 5695644074d5b754782350e844a15ff900db37e5 Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 13:25:18 -0500 Subject: [PATCH 507/689] drop the nose --- .azure-pipelines/windows.yml | 12 ++++-------- azure-pipelines.yml | 4 ---- dev-requirements.txt | 1 + 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 469fd27c96..34b2aaf96f 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -11,7 +11,7 @@ jobs: variables: EXTRA_WHEELS: "https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" DEPENDS: numpy scipy matplotlib h5py pydicom - CHECK_TYPE: nosetests + CHECK_TYPE: test strategy: matrix: ${{ insert }}: ${{ parameters.matrix }} @@ -30,7 +30,7 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% - python -m pip install nose coverage codecov pytest + python -m pip install coverage codecov pytest displayName: 'Install dependencies' - script: | python -m pip install .[$(CHECK_TYPE)] @@ -40,12 +40,8 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . 
- if $(CHECK_TYPE)=="nosetests" ( - nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - ) else ( - pytest --cov nibabel -v --pyargs nibabel --deselect streamlines - ) - displayName: 'Nose tests' + pytest --cov nibabel -v --pyargs nibabel --deselect streamlines + displayName: 'Run tests' - script: | cd for_testing codecov diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 5bbd37986c..d09c5b7740 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,7 +34,3 @@ jobs: py38-x64: PYTHON_VERSION: '3.8' PYTHON_ARCH: 'x64' - pytest: - PYTHON_VERSION: '3.6' - PYTHON_ARCH: 'x64' - CHECK_TYPE: 'test' diff --git a/dev-requirements.txt b/dev-requirements.txt index 659ab6cada..aa0980c3b4 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,3 +1,4 @@ # Requirements for running tests -r requirements.txt nose +pytest From 636dc5599b40f52a27775e5b72af47b8aaf196b8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 15:31:52 -0500 Subject: [PATCH 508/689] CI: Give up trying to deselect streamlines --- .azure-pipelines/windows.yml | 2 +- .travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 34b2aaf96f..cf9b82a1c0 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -40,7 +40,7 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . - pytest --cov nibabel -v --pyargs nibabel --deselect streamlines + pytest --cov nibabel -v --pyargs nibabel displayName: 'Run tests' - script: | cd for_testing diff --git a/.travis.yml b/.travis.yml index b6ef1a18a0..f8333d39a7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -196,7 +196,7 @@ script: mkdir for_testing cd for_testing cp ../.coveragerc . 
- pytest --cov nibabel -v --pyargs nibabel --deselect streamlines + pytest --cov nibabel -v --pyargs nibabel else false fi From 85ac45260270bd0c45c2c3bc91281e4e237990f9 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 15:35:14 -0500 Subject: [PATCH 509/689] changed nosetools assert_true to assert in test_io.py --- nibabel/freesurfer/tests/test_io.py | 35 +++++++++++++++-------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index fc926ee2af..41bcdd17cf 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -9,7 +9,8 @@ from ...tmpdirs import InTemporaryDirectory -from nose.tools import assert_true + +import pytest import numpy as np from numpy.testing import assert_equal, assert_raises, dec, assert_allclose, assert_array_equal @@ -92,13 +93,13 @@ def test_geometry(): with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) read_geometry(surf_path, read_metadata=True) - assert_true(any('volume information contained' in str(ww.message) + assert(any('volume information contained' in str(ww.message) for ww in w)) - assert_true(any('extension code' in str(ww.message) for ww in w)) + assert(any('extension code' in str(ww.message) for ww in w)) volume_info['head'] = [1, 2] with clear_and_catch_warnings() as w: write_geometry(surf_path, coords, faces, create_stamp, volume_info) - assert_true(any('Unknown extension' in str(ww.message) for ww in w)) + assert(any('Unknown extension' in str(ww.message) for ww in w)) volume_info['a'] = 0 assert_raises(ValueError, write_geometry, surf_path, coords, faces, create_stamp, volume_info) @@ -137,8 +138,8 @@ def test_morph_data(): """Test IO of morphometry data file (eg. curvature).""" curv_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "curv")) curv = read_morph_data(curv_path) - assert_true(-1.0 < curv.min() < 0) - assert_true(0 < curv.max() < 1.0) + assert(-1.0 < curv.min() < 0) + assert(0 < curv.max() < 1.0) with InTemporaryDirectory(): new_path = 'test' write_morph_data(new_path, curv) @@ -177,8 +178,8 @@ def test_annot(): hash_ = _hash_file_content(annot_path) labels, ctab, names = read_annot(annot_path) - assert_true(labels.shape == (163842, )) - assert_true(ctab.shape == (len(names), 5)) + assert(labels.shape == (163842, )) + assert(ctab.shape == (len(names), 5)) labels_orig = None if a == 'aparc': @@ -186,9 +187,9 @@ def test_annot(): np.testing.assert_array_equal(labels == -1, labels_orig == 0) # Handle different version of fsaverage if hash_ == 'bf0b488994657435cdddac5f107d21e8': - assert_true(np.sum(labels_orig == 0) == 13887) + assert(np.sum(labels_orig == 0) == 13887) elif hash_ == 'd4f5b7cbc2ed363ac6fcf89e19353504': - assert_true(np.sum(labels_orig == 1639705) == 13327) + assert(np.sum(labels_orig == 1639705) == 13327) else: raise RuntimeError("Unknown freesurfer file. 
Please report " "the problem to the maintainer of nibabel.") @@ -272,7 +273,7 @@ def test_write_annot_fill_ctab(): print(labels) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert_true( + assert( any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path, orig_ids=True) @@ -288,7 +289,7 @@ def test_write_annot_fill_ctab(): rgbal[:, 2] * (2 ** 16)) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert_true( + assert( not any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path) @@ -348,13 +349,13 @@ def test_label(): label_path = pjoin(data_path, "label", "lh.cortex.label") label = read_label(label_path) # XXX : test more - assert_true(label.min() >= 0) - assert_true(label.max() <= 163841) - assert_true(label.shape[0] <= 163842) + assert(label.min() >= 0) + assert(label.max() <= 163841) + assert(label.shape[0] <= 163842) labels, scalars = read_label(label_path, True) - assert_true(np.all(labels == label)) - assert_true(len(labels) == len(scalars)) + assert(np.all(labels == label)) + assert(len(labels) == len(scalars)) def test_write_annot_maxstruct(): From 62d53c3712fe3a268849d4cc5f2feae1a02fd52c Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 15:35:28 -0500 Subject: [PATCH 510/689] finish porting gifti module tests to pytest --- nibabel/gifti/tests/test_giftiio.py | 8 +- nibabel/gifti/tests/test_parse_gifti_fast.py | 115 +++++++++---------- 2 files changed, 60 insertions(+), 63 deletions(-) diff --git a/nibabel/gifti/tests/test_giftiio.py b/nibabel/gifti/tests/test_giftiio.py index 90fe3e4d37..87d28b00c4 100644 --- a/nibabel/gifti/tests/test_giftiio.py +++ b/nibabel/gifti/tests/test_giftiio.py @@ -9,8 +9,6 @@ import warnings -from nose.tools import (assert_true, assert_false, assert_equal, - assert_raises) from nibabel.testing import clear_and_catch_warnings from nibabel.tmpdirs import InTemporaryDirectory @@ -24,7 +22,7 @@ class TestGiftiIO(object): def setUp(self): with clear_and_catch_warnings() as w: warnings.simplefilter('always', DeprecationWarning) - assert_equal(len(w), 1) + assert len(w) == 1 def test_read_deprecated(): @@ -33,7 +31,7 @@ def test_read_deprecated(): from nibabel.gifti.giftiio import read, write img = read(DATA_FILE1) - assert_equal(len(w), 1) + assert len(w) == 1 with InTemporaryDirectory(): write(img, 'test.gii') - assert_equal(len(w), 2) + assert len(w) == 2 diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index a06180a964..15a2c69f1c 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -22,8 +22,7 @@ from numpy.testing import assert_array_almost_equal -from nose.tools import (assert_true, assert_false, assert_equal, - assert_raises) +import pytest from ...testing import clear_and_catch_warnings @@ -106,9 +105,9 @@ def assert_default_types(loaded): if defaulttype is type(None): continue loadedtype = type(getattr(loaded, attr)) - assert_equal(loadedtype, defaulttype, - "Type mismatch for attribute: {} ({!s} != {!s})".format( - attr, loadedtype, defaulttype)) + assert loadedtype == defaulttype, ( + "Type mismatch for attribute: {} ({!s} != {!s})".format( + attr, loadedtype, defaulttype)) def test_default_types(): @@ -142,18 +141,18 @@ def 
test_read_ordering(): # read another image first (DATA_FILE2) then the shape is wrong # Read an image img2 = load(DATA_FILE2) - assert_equal(img2.darrays[0].data.shape, (143479, 1)) + assert img2.darrays[0].data.shape == (143479, 1) # Read image for which we know output shape img = load(DATA_FILE1) - assert_equal(img.darrays[0].data.shape, (3, 3)) + assert img.darrays[0].data.shape == (3, 3) def test_load_metadata(): for i, dat in enumerate(datafiles): img = load(dat) img.meta - assert_equal(numDA[i], img.numDA) - assert_equal(img.version, '1.0') + assert numDA[i] == img.numDA + assert img.version == '1.0' def test_metadata_deprecations(): @@ -163,12 +162,12 @@ def test_metadata_deprecations(): # Test deprecation with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) - assert_equal(me, img.get_meta()) + assert me == img.get_meta() with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) img.set_metadata(me) - assert_equal(me, img.meta) + assert me == img.meta def test_load_dataarray1(): @@ -181,14 +180,14 @@ def test_load_dataarray1(): assert_array_almost_equal(img.darrays[0].data, DATA_FILE1_darr1) assert_array_almost_equal(img.darrays[1].data, DATA_FILE1_darr2) me = img.darrays[0].meta.metadata - assert_true('AnatomicalStructurePrimary' in me) - assert_true('AnatomicalStructureSecondary' in me) - assert_equal(me['AnatomicalStructurePrimary'], 'CortexLeft') + assert 'AnatomicalStructurePrimary' in me + assert 'AnatomicalStructureSecondary' in me + me['AnatomicalStructurePrimary'] == 'CortexLeft' assert_array_almost_equal(img.darrays[0].coordsys.xform, np.eye(4, 4)) - assert_equal(xform_codes.niistring[img.darrays[ - 0].coordsys.dataspace], 'NIFTI_XFORM_TALAIRACH') - assert_equal(xform_codes.niistring[img.darrays[ - 0].coordsys.xformspace], 'NIFTI_XFORM_TALAIRACH') + assert xform_codes.niistring[ + img.darrays[0].coordsys.dataspace] == 'NIFTI_XFORM_TALAIRACH' + assert xform_codes.niistring[img.darrays[ + 0].coordsys.xformspace] == 'NIFTI_XFORM_TALAIRACH' def test_load_dataarray2(): @@ -223,7 +222,7 @@ def test_load_dataarray4(): def test_dataarray5(): img5 = load(DATA_FILE5) for da in img5.darrays: - assert_equal(gifti_endian_codes.byteorder[da.endian], 'little') + gifti_endian_codes.byteorder[da.endian] == 'little' assert_array_almost_equal(img5.darrays[0].data, DATA_FILE5_darr1) assert_array_almost_equal(img5.darrays[1].data, DATA_FILE5_darr2) # Round trip tested below @@ -234,24 +233,24 @@ def test_base64_written(): with open(DATA_FILE5, 'rb') as fobj: contents = fobj.read() # Confirm the bad tags are still in the file - assert_true(b'GIFTI_ENCODING_B64BIN' in contents) - assert_true(b'GIFTI_ENDIAN_LITTLE' in contents) + assert b'GIFTI_ENCODING_B64BIN' in contents + assert b'GIFTI_ENDIAN_LITTLE' in contents # The good ones are missing - assert_false(b'Base64Binary' in contents) - assert_false(b'LittleEndian' in contents) + assert b'Base64Binary' not in contents + assert b'LittleEndian' not in contents # Round trip img5 = load(DATA_FILE5) save(img5, 'fixed.gii') with open('fixed.gii', 'rb') as fobj: contents = fobj.read() # The bad codes have gone, replaced by the good ones - assert_false(b'GIFTI_ENCODING_B64BIN' in contents) - assert_false(b'GIFTI_ENDIAN_LITTLE' in contents) - assert_true(b'Base64Binary' in contents) + assert b'GIFTI_ENCODING_B64BIN' not in contents + assert b'GIFTI_ENDIAN_LITTLE' not in contents + assert b'Base64Binary' in contents if sys.byteorder == 'little': - 
- assert_true(b'LittleEndian' in contents) + assert b'LittleEndian' in contents else: - assert_true(b'BigEndian' in contents) + assert b'BigEndian' in contents img5_fixed = load('fixed.gii') darrays = img5_fixed.darrays assert_array_almost_equal(darrays[0].data, DATA_FILE5_darr1) @@ -263,7 +262,7 @@ def test_readwritedata(): with InTemporaryDirectory(): save(img, 'test.gii') img2 = load('test.gii') - assert_equal(img.numDA, img2.numDA) + assert img.numDA == img2.numDA assert_array_almost_equal(img.darrays[0].data, img2.darrays[0].data) @@ -272,7 +271,7 @@ def test_modify_darray(): img = load(fname) darray = img.darrays[0] darray.data[:] = 0 - assert_true(np.array_equiv(darray.data, 0)) + assert np.array_equiv(darray.data, 0) def test_write_newmetadata(): @@ -281,32 +280,32 @@ def test_write_newmetadata(): newmeta = gi.GiftiMetaData(attr) img.meta = newmeta myme = img.meta.metadata - assert_true('mykey' in myme) + assert 'mykey' in myme newmeta = gi.GiftiMetaData.from_dict({'mykey1': 'val2'}) img.meta = newmeta myme = img.meta.metadata - assert_true('mykey1' in myme) - assert_false('mykey' in myme) + assert 'mykey1' in myme + assert 'mykey' not in myme def test_load_getbyintent(): img = load(DATA_FILE1) da = img.get_arrays_from_intent("NIFTI_INTENT_POINTSET") - assert_equal(len(da), 1) + assert len(da) == 1 with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=DeprecationWarning) da = img.getArraysFromIntent("NIFTI_INTENT_POINTSET") - assert_equal(len(da), 1) - assert_equal(len(w), 1) - assert_equal(w[0].category, DeprecationWarning) + assert len(da) == 1 + assert len(w) == 1 + assert w[0].category == DeprecationWarning da = img.get_arrays_from_intent("NIFTI_INTENT_TRIANGLE") - assert_equal(len(da), 1) + assert len(da) == 1 da = img.get_arrays_from_intent("NIFTI_INTENT_CORREL") - assert_equal(len(da), 0) - assert_equal(da, []) + assert len(da) == 0 + assert da == [] def test_load_labeltable(): @@ -317,15 +316,15 @@ def test_load_labeltable(): bimg = load('test.gii') for img in (img6, bimg): assert_array_almost_equal(img.darrays[0].data[:3], DATA_FILE6_darr1) - assert_equal(len(img.labeltable.labels), 36) + assert len(img.labeltable.labels) == 36 labeldict = img.labeltable.get_labels_as_dict() - assert_true(660700 in labeldict) - assert_equal(labeldict[660700], 'entorhinal') - assert_equal(img.labeltable.labels[1].key, 2647065) - assert_equal(img.labeltable.labels[1].red, 0.0980392) - assert_equal(img.labeltable.labels[1].green, 0.392157) - assert_equal(img.labeltable.labels[1].blue, 0.156863) - assert_equal(img.labeltable.labels[1].alpha, 1) + assert 660700 in labeldict + assert labeldict[660700] == 'entorhinal' + assert img.labeltable.labels[1].key == 2647065 + assert img.labeltable.labels[1].red == 0.0980392 + assert img.labeltable.labels[1].green == 0.392157 + assert img.labeltable.labels[1].blue == 0.156863 + assert img.labeltable.labels[1].alpha == 1 def test_labeltable_deprecations(): @@ -335,14 +334,14 @@ def test_labeltable_deprecations(): # Test deprecation with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) - assert_equal(lt, img.get_labeltable()) - assert_equal(len(w), 1) + assert lt == img.get_labeltable() + assert len(w) == 1 with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) img.set_labeltable(lt) - assert_equal(len(w), 1) - assert_equal(lt, img.labeltable) + assert len(w) == 1 + assert lt == img.labeltable def test_parse_dataarrays(): @@ -361,8 +360,8 @@ def
test_parse_dataarrays(): with clear_and_catch_warnings() as w: warnings.filterwarnings('once', category=UserWarning) load(fn) - assert_equal(len(w), 1) - assert_equal(img.numDA, 0) + assert len(w) == 1 + assert img.numDA == 0 def test_parse_deprecated(): @@ -371,16 +370,16 @@ def test_parse_deprecated(): with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) op = Outputter() - assert_equal(len(w), 1) + assert len(w) == 1 op.initialize() # smoke test--no error. with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) - assert_raises(ValueError, parse_gifti_file) - assert_equal(len(w), 1) + pytest.raises(ValueError, parse_gifti_file) + assert len(w) == 1 def test_parse_with_buffersize(): for buff_sz in [None, 1, 2**12]: img2 = load(DATA_FILE2, buffer_size=buff_sz) - assert_equal(img2.darrays[0].data.shape, (143479, 1)) + assert img2.darrays[0].data.shape == (143479, 1) From 325080e3f9f0edd6c892c04c17293521ddc14101 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 14:57:23 -0500 Subject: [PATCH 511/689] TEST: Style cleanup --- nibabel/tests/test_arrayproxy.py | 8 +++---- nibabel/tests/test_arraywriters.py | 7 +++--- nibabel/tests/test_batteryrunners.py | 36 +++++----------------------- nibabel/tests/test_casting.py | 8 ++----- 4 files changed, 14 insertions(+), 45 deletions(-) diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index b00af7d90f..7b2fcca384 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -73,7 +73,7 @@ def test_init(): assert ap.shape == shape # shape should be read only with pytest.raises(AttributeError): - setattr(ap, 'shape', shape) + ap.shape = shape # Get the data assert_array_equal(np.asarray(ap), arr) # Check we can modify the original header without changing the ap version @@ -323,10 +323,8 @@ def check_mmap(hdr, offset, proxy_class, assert not unscaled_is_mmap assert not back_is_mmap else: - assert (unscaled_is_mmap == - (viral_memmap or unscaled_really_mmap)) - assert (back_is_mmap == - (viral_memmap or scaled_really_mmap)) + assert unscaled_is_mmap == (viral_memmap or unscaled_really_mmap) + assert back_is_mmap == (viral_memmap or scaled_really_mmap) if scaled_really_mmap: assert back_data.mode == expected_mode del prox, back_data diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index fa24b37102..8f30f04321 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -14,11 +14,10 @@ from ..casting import int_abs, type_info, shared_range, on_powerpc from ..volumeutils import array_from_file, apply_read_scaling, _dt_min_max -from numpy.testing import (assert_array_almost_equal, - assert_array_equal) +from numpy.testing import assert_array_almost_equal, assert_array_equal import pytest from ..testing_pytest import (assert_allclose_safely, suppress_warnings, - error_warnings) + error_warnings) FLOAT_TYPES = np.sctypes['float'] @@ -532,7 +531,7 @@ def test_nan2zero(): # Deprecation warning for nan2zero as argument to `to_fileobj` with error_warnings(): with pytest.raises(DeprecationWarning): - aw.to_fileobj(BytesIO(), 'F', False) + aw.to_fileobj(BytesIO(), 'F', False) with pytest.raises(DeprecationWarning): aw.to_fileobj(BytesIO(), 'F', nan2zero=False) # Error if nan2zero is not the value set at initialization diff --git a/nibabel/tests/test_batteryrunners.py b/nibabel/tests/test_batteryrunners.py index 883054ff96..69f18b75ac 
100644 --- a/nibabel/tests/test_batteryrunners.py +++ b/nibabel/tests/test_batteryrunners.py @@ -159,43 +159,19 @@ def test_logging(): def test_checks(): battrun = BatteryRunner((chk1,)) reports = battrun.check_only({}) - assert (reports[0] == - Report(KeyError, - 20, - 'no "testkey"', - '')) + assert reports[0] == Report(KeyError, 20, 'no "testkey"', '') obj, reports = battrun.check_fix({}) - assert (reports[0] == - Report(KeyError, - 20, - 'no "testkey"', - 'added "testkey"')) + assert reports[0] == Report(KeyError, 20, 'no "testkey"', 'added "testkey"') assert obj == {'testkey': 1} battrun = BatteryRunner((chk1, chk2)) reports = battrun.check_only({}) - assert (reports[0] == - Report(KeyError, - 20, - 'no "testkey"', - '')) - assert (reports[1] == - Report(KeyError, - 20, - 'no "testkey"', - '')) + assert reports[0] == Report(KeyError, 20, 'no "testkey"', '') + assert reports[1] == Report(KeyError, 20, 'no "testkey"', '') obj, reports = battrun.check_fix({}) # In the case of fix, the previous fix exposes a different error # Note, because obj is mutable, first and second point to modified # (and final) dictionary output_obj = {'testkey': 0} - assert (reports[0] == - Report(KeyError, - 20, - 'no "testkey"', - 'added "testkey"')) - assert (reports[1] == - Report(ValueError, - 10, - '"testkey" != 0', - 'set "testkey" to 0')) + assert reports[0] == Report(KeyError, 20, 'no "testkey"', 'added "testkey"') + assert reports[1] == Report(ValueError, 10, '"testkey" != 0', 'set "testkey" to 0') assert obj == output_obj diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index 791cdacedb..3fe74dfb8b 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -43,9 +43,7 @@ def test_shared_range(): if imax_roundtrip == imax: thresh_overflow = True if thresh_overflow: - assert np.all( - (bit_bigger == casted_mx) | - (bit_bigger == imax)) + assert np.all((bit_bigger == casted_mx) | (bit_bigger == imax)) else: assert np.all((bit_bigger <= casted_mx)) if it in np.sctypes['uint']: @@ -71,9 +69,7 @@ def test_shared_range(): if imin_roundtrip == imin: thresh_overflow = True if thresh_overflow: - assert np.all( - (bit_smaller == casted_mn) | - (bit_smaller == imin)) + assert np.all((bit_smaller == casted_mn) | (bit_smaller == imin)) else: assert np.all((bit_smaller >= casted_mn)) From 5f695b8c3fc06536aebcfdc5542ce7968df5cb62 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 15:49:36 -0500 Subject: [PATCH 512/689] TEST: Simplify data/environment fixtures --- nibabel/tests/test_data.py | 17 ++++++----------- nibabel/tests/test_environment.py | 25 ++++++++++--------------- 2 files changed, 16 insertions(+), 26 deletions(-) diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index e48356eb50..1c8812fe61 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -19,24 +19,19 @@ import pytest -from .test_environment import (with_environment, - DATA_KEY, - USER_KEY) +from .test_environment import with_environment, DATA_KEY, USER_KEY -@pytest.fixture() +@pytest.fixture def with_nimd_env(request, with_environment): DATA_FUNCS = {} DATA_FUNCS['home_dir_func'] = nibd.get_nipy_user_dir DATA_FUNCS['sys_dir_func'] = nibd.get_nipy_system_dir DATA_FUNCS['path_func'] = nibd.get_data_path - - def teardown_data_env(): - nibd.get_nipy_user_dir = DATA_FUNCS['home_dir_func'] - nibd.get_nipy_system_dir = DATA_FUNCS['sys_dir_func'] - nibd.get_data_path = DATA_FUNCS['path_func'] - - request.addfinalizer(teardown_data_env) + yield + 
nibd.get_nipy_user_dir = DATA_FUNCS['home_dir_func'] + nibd.get_nipy_system_dir = DATA_FUNCS['sys_dir_func'] + nibd.get_data_path = DATA_FUNCS['path_func'] def test_datasource(): diff --git a/nibabel/tests/test_environment.py b/nibabel/tests/test_environment.py index 6dc127c95f..e0514c337e 100644 --- a/nibabel/tests/test_environment.py +++ b/nibabel/tests/test_environment.py @@ -14,7 +14,7 @@ USER_KEY = 'NIPY_USER_DIR' -@pytest.fixture() +@pytest.fixture def with_environment(request): """Setup test environment for some functions that are tested in this module. In particular this functions stores attributes @@ -24,20 +24,15 @@ def with_environment(request): """ GIVEN_ENV = {} GIVEN_ENV['env'] = env.copy() - - - def teardown_environment(): - """Restore things that were remembered by the setup_environment function - """ - orig_env = GIVEN_ENV['env'] - # Pull keys out into list to avoid altering dictionary during iteration, - # causing python 3 error - for key in list(env.keys()): - if key not in orig_env: - del env[key] - env.update(orig_env) - - request.addfinalizer(teardown_environment) + yield + """Restore things that were remembered by the setup_environment function """ + orig_env = GIVEN_ENV['env'] + # Pull keys out into list to avoid altering dictionary during iteration, + # causing python 3 error + for key in list(env.keys()): + if key not in orig_env: + del env[key] + env.update(orig_env) def test_nipy_home(): From b66ec38188ce7549861f128c39a651a704dfaa1c Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 15:56:59 -0500 Subject: [PATCH 513/689] make sure cov is installed --- .azure-pipelines/windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 469fd27c96..1d660089bb 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -30,7 +30,7 @@ jobs: displayName: 'Update build tools' - script: | python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS% - python -m pip install nose coverage codecov pytest + python -m pip install nose coverage codecov pytest pytest-cov displayName: 'Install dependencies' - script: | python -m pip install .[$(CHECK_TYPE)] From 2bc53088975982ab5f562727ad939ddc8eeb4d1c Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 15:59:43 -0500 Subject: [PATCH 514/689] changed nosetools assertion to pytest in test_mghformat.py --- nibabel/freesurfer/tests/test_mghformat.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 289acbcd01..25a7254def 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -23,7 +23,8 @@ from ...wrapstruct import WrapStructError from ... 
import imageglobals -from nose.tools import assert_true, assert_false + +import pytest from numpy.testing import (assert_equal, assert_array_equal, assert_array_almost_equal, assert_almost_equal, @@ -260,7 +261,7 @@ def test_eq(): hdr2 = MGHHeader() assert_equal(hdr, hdr2) hdr.set_data_shape((2, 3, 4)) - assert_false(hdr == hdr2) + assert(hdr != hdr2) hdr2.set_data_shape((2, 3, 4)) assert_equal(hdr, hdr2) @@ -287,7 +288,7 @@ def test_mgh_load_fileobj(): bio = io.BytesIO(contents) fm = MGHImage.make_file_map(mapping=dict(image=bio)) img2 = MGHImage.from_file_map(fm) - assert_true(img2.dataobj.file_like is bio) + assert(img2.dataobj.file_like is bio) assert_array_equal(img.get_fdata(), img2.get_fdata()) @@ -477,7 +478,7 @@ def test_as_byteswapped(self): # same code just returns a copy for endianness in BIG_CODES: hdr2 = hdr.as_byteswapped(endianness) - assert_false(hdr2 is hdr) + assert(hdr2 is not hdr) assert_equal(hdr2, hdr) # Different code raises error From f3be7b238ba1e5e41eb475da7c406ac821aa2b8f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 16:00:29 -0500 Subject: [PATCH 515/689] TEST: Simplify, reformat test_data --- nibabel/tests/test_data.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py index 1c8812fe61..e5d5000438 100644 --- a/nibabel/tests/test_data.py +++ b/nibabel/tests/test_data.py @@ -157,8 +157,7 @@ def test_data_path(with_nimd_env): with open(tmpfile, 'wt') as fobj: fobj.write('[DATA]\n') fobj.write('path = %s\n' % '/path/two') - assert (get_data_path() == - tst_list + ['/path/two'] + old_pth) + assert get_data_path() == tst_list + ['/path/two'] + old_pth def test_find_data_dir(): @@ -201,10 +200,10 @@ def test_make_datasource(with_nimd_env): assert ds.version == '0.1' +@pytest.mark.xfail(raises=DataError) def test_bomber(): - with pytest.raises(DataError): - b = Bomber('bomber example', 'a message') - b.any_attribute # no error + b = Bomber('bomber example', 'a message') + b.any_attribute # no error def test_bomber_inspect(): @@ -213,13 +212,12 @@ def test_bomber_inspect(): def test_datasource_or_bomber(with_nimd_env): - pkg_def = dict( - relpath='pkg') + pkg_def = dict(relpath='pkg') with TemporaryDirectory() as tmpdir: nibd.get_data_path = lambda: [tmpdir] ds = datasource_or_bomber(pkg_def) with pytest.raises(DataError): - getattr(ds, 'get_filename') + ds.get_filename('some_file.txt') pkg_dir = pjoin(tmpdir, 'pkg') os.mkdir(pkg_dir) tmpfile = pjoin(pkg_dir, 'config.ini') @@ -235,4 +233,4 @@ def test_datasource_or_bomber(with_nimd_env): pkg_def['min version'] = '0.3' ds = datasource_or_bomber(pkg_def) # not OK with pytest.raises(DataError): - getattr(ds, 'get_filename') + ds.get_filename('some_file.txt') From 92c758aebd75b7944c351b00cf39ba733782bc86 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 16:17:18 -0500 Subject: [PATCH 516/689] STY: Remove unused pkgutil import --- nibabel/optpkg.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/optpkg.py b/nibabel/optpkg.py index 3590cd3c00..81dae3010c 100644 --- a/nibabel/optpkg.py +++ b/nibabel/optpkg.py @@ -1,5 +1,4 @@ """ Routines to support optional packages """ -import pkgutil from distutils.version import LooseVersion from .tripwire import TripWire From 1e7bdd035590d4f5235b379f6385ce2bcfc753c8 Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 16:31:57 -0500 Subject: [PATCH 517/689] test script condition --- .azure-pipelines/windows.yml | 14 +++++++++----- 1 file 
changed, 9 insertions(+), 5 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 1d660089bb..008bbef248 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -40,12 +40,16 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . - if $(CHECK_TYPE)=="nosetests" ( - nosetests --with-doctest --with-coverage --cover-package nibabel nibabel - ) else ( - pytest --cov nibabel -v --pyargs nibabel --deselect streamlines - ) + nosetests --with-doctest --with-coverage --cover-package nibabel nibabel displayName: 'Nose tests' + condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) + - script: | + mkdir for_testing + cd for_testing + cp ../.coveragerc . + pytest --cov nibabel -v --pyargs nibabel --deselect streamlines + displayName: 'Pytest tests' + condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'test')) - script: | cd for_testing codecov From 2ef9ad4a8a998866d9f8fbadfaa44544226ff650 Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 16:46:55 -0500 Subject: [PATCH 518/689] reintroduce nosetests into matrix --- azure-pipelines.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 5bbd37986c..0b37903121 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -38,3 +38,7 @@ jobs: PYTHON_VERSION: '3.6' PYTHON_ARCH: 'x64' CHECK_TYPE: 'test' + nosetests: + PYTHON_VERSION: '3.6' + PYTHON_ARCH: 'x64' + CHECK_TYPE: 'nosetests' \ No newline at end of file From 6817eb6b899a4a1deefa08e21437844f3f05848b Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 16:48:28 -0500 Subject: [PATCH 519/689] remove unnecessary dependency --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 34c8456a04..fe6d91bdd3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -59,7 +59,6 @@ nosetests = coverage nose >=0.11 pytest - pytest-cov test = coverage nose >=0.11 From 2d0dc89542e0be1c7c1476f1d4fdaf31f168031d Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 16:54:03 -0500 Subject: [PATCH 520/689] pytest is already tested --- azure-pipelines.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 67d859c09b..2ef2539c74 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,10 +34,6 @@ jobs: py38-x64: PYTHON_VERSION: '3.8' PYTHON_ARCH: 'x64' - pytest: - PYTHON_VERSION: '3.6' - PYTHON_ARCH: 'x64' - CHECK_TYPE: 'test' nosetests: PYTHON_VERSION: '3.6' PYTHON_ARCH: 'x64' From 172598588072958887e8dbde4a1efd75252c90af Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 17:06:04 -0500 Subject: [PATCH 521/689] nose test inclusions --- .azure-pipelines/windows.yml | 59 +++++++++++++++++++++++++++++++++++- 1 file changed, 58 insertions(+), 1 deletion(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index e0d0bdf651..8dcf0e24d8 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -40,7 +40,64 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . 
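# The -I entries added in the step below rely on nose's -I/--ignore-files
# option, which takes a regular expression matched against candidate file
# names; each module already ported to pytest is listed so nose stops
# collecting it. A minimal sketch of the mechanism (hypothetical pattern):
#     nosetests nibabel -I "test_(data|environment)\.py"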
- nosetests --with-doctest --with-coverage --cover-package nibabel nibabel + nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \ + -I test_array_sequence \ + -I test_tractogram \ + -I test_api_validators \ + -I test_arrayproxy \ + -I test_arraywriters \ + -I test_batteryrunners \ + -I test_brikhead \ + -I test_casting \ + -I test_cifti2io_header \ + -I test_data \ + -I test_deprecated \ + -I test_deprecator \ + -I test_dft \ + -I test_ecat \ + -I test_ecat_data \ + -I test_endiancodes \ + -I test_environment \ + -I test_euler \ + -I test_filebasedimages \ + -I test_filehandles \ + -I test_fileholders \ + -I test_filename_parser \ + -I test_files_interface \ + -I test_fileslice \ + -I test_fileutils \ + -I test_floating \ + -I test_funcs \ + -I test_h5py_compat \ + -I test_image_api \ + -I test_image_load_save \ + -I test_image_types \ + -I test_imageclasses \ + -I test_imageglobals \ + -I test_keywordonly \ + -I test_loadsave \ + -I test_minc1 \ + -I test_minc2 \ + -I test_minc2_data \ + -I test_mriutils \ + -I test_nibabel_data \ + -I test_nifti1 \ + -I test_nifti2 \ + -I test_openers \ + -I test_optpkg \ + -I test_orientations \ + -I test_parrec \ + -I test_parrec_data \ + -I test_pkg_info \ + -I test_processing \ + -I test_proxy_api \ + -I test_quaternions \ + -I test_recoder \ + -I test_remmovalschedule \ + -I test_round_trip \ + -I test_rstutils \ + -I test_scaling \ + -I test_wrapstruct displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | From dba6f59ecf72e1ab44495b1414f68763dce43dce Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:07:58 -0500 Subject: [PATCH 522/689] changed from numpy raises to pytest.raises and skipif --- nibabel/freesurfer/tests/test_io.py | 65 ++++++++++++++--------------- 1 file changed, 31 insertions(+), 34 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 41bcdd17cf..cde2edeb5f 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -12,7 +12,7 @@ import pytest import numpy as np -from numpy.testing import assert_equal, assert_raises, dec, assert_allclose, assert_array_equal +from numpy.testing import assert_allclose, assert_array_equal from .. 
import (read_geometry, read_morph_data, read_annot, read_label, write_geometry, write_morph_data, write_annot) @@ -20,7 +20,7 @@ from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data from ...fileslice import strided_scalar -from ...testing import clear_and_catch_warnings +from ...testing_pytest import clear_and_catch_warnings DATA_SDIR = 'fsaverage' @@ -36,10 +36,7 @@ data_path = pjoin(nib_data, 'nitest-freesurfer', DATA_SDIR) have_freesurfer = isdir(data_path) -freesurfer_test = dec.skipif( - not have_freesurfer, - 'cannot find freesurfer {0} directory'.format(DATA_SDIR)) - +freesurfer_test = pytest.mark.skipif(not have_freesurfer, reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) def _hash_file_content(fname): hasher = hashlib.md5() @@ -54,19 +51,18 @@ def test_geometry(): """Test IO of .surf""" surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "inflated")) coords, faces = read_geometry(surf_path) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) + assert 0==faces.min() + assert coords.shape[0]== faces.max() + 1 surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "sphere")) coords, faces, volume_info, create_stamp = read_geometry( surf_path, read_metadata=True, read_stamp=True) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) - assert_equal(9, len(volume_info)) - assert_equal([2, 0, 20], volume_info['head']) - assert_equal(u'created by greve on Thu Jun 8 19:17:51 2006', - create_stamp) + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 + assert 9 == len(volume_info) + assert [2, 0, 20] == volume_info['head'] + assert 'created by greve on Thu Jun 8 19:17:51 2006' == create_stamp # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -83,7 +79,7 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) - assert_equal(volume_info2['cras'], volume_info['cras']) + assert np.array_equal(volume_info2['cras'], volume_info['cras']) with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') @@ -101,10 +97,11 @@ def test_geometry(): write_geometry(surf_path, coords, faces, create_stamp, volume_info) assert(any('Unknown extension' in str(ww.message) for ww in w)) volume_info['a'] = 0 - assert_raises(ValueError, write_geometry, surf_path, coords, - faces, create_stamp, volume_info) + with pytest.raises(ValueError): + write_geometry(surf_path, coords, faces, create_stamp, volume_info) + - assert_equal(create_stamp, read_create_stamp) + assert create_stamp == read_create_stamp np.testing.assert_array_equal(coords, coords2) np.testing.assert_array_equal(faces, faces2) @@ -123,14 +120,14 @@ def test_quad_geometry(): new_quad = pjoin(get_nibabel_data(), 'nitest-freesurfer', 'subjects', 'bert', 'surf', 'lh.inflated.nofix') coords, faces = read_geometry(new_quad) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 with InTemporaryDirectory(): new_path = 'test' write_geometry(new_path, coords, faces) coords2, faces2 = read_geometry(new_path) - assert_equal(coords, coords2) - assert_equal(faces, faces2) + assert np.array_equal(coords, coords2) + assert np.array_equal(faces, faces2) @freesurfer_test @@ -144,7 +141,7 @@ def test_morph_data(): new_path = 'test' write_morph_data(new_path, curv) curv2 = read_morph_data(new_path)
- assert_equal(curv2, curv) + assert np.array_equal(curv2, curv) def test_write_morph_data(): @@ -157,17 +154,17 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape - assert_equal(values, read_morph_data('test.curv')) - assert_raises(ValueError, write_morph_data, 'test.curv', - np.zeros(shape), big_num) - # Windows 32-bit overflows Python int + assert np.array_equal(values, read_morph_data('test.curv')) + with pytest.raises(ValueError): + write_morph_data('test.curv', np.zeros(shape), big_num) + # Windows 32-bit overflows Python int if np.dtype(np.int) != np.dtype(np.int32): - assert_raises(ValueError, write_morph_data, 'test.curv', - strided_scalar((big_num,))) + with pytest.raises(ValueError): + write_morph_data('test.curv', strided_scalar((big_num,))) for shape in bad_shapes: - assert_raises(ValueError, write_morph_data, 'test.curv', - values.reshape(shape)) + with pytest.raises(ValueError): + write_morph_data('test.curv', values.reshape(shape)) + @freesurfer_test def test_annot(): @@ -208,7 +205,7 @@ def test_annot(): if labels_orig is not None: np.testing.assert_array_equal(labels_orig, labels_orig_2) np.testing.assert_array_equal(ctab, ctab2) - assert_equal(names, names2) + assert names == names2 def test_read_write_annot(): @@ -374,4 +371,4 @@ def test_write_annot_maxstruct(): # Check round-trip assert_array_equal(labels, rt_labels) assert_array_equal(rgba, rt_ctab[:, :4]) - assert_equal(names, [n.decode('ascii') for n in rt_names]) + assert names == [n.decode('ascii') for n in rt_names] From b2676d80f7d13ca836278d221fc1743767c54bd7 Mon Sep 17 00:00:00 2001 From: Anibal Solon Date: Tue, 4 Feb 2020 17:19:55 -0500 Subject: [PATCH 523/689] use cmd.exe caret line continuations on windows --- .azure-pipelines/windows.yml | 116 +++++++++++++++++------------------ 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 8dcf0e24d8..e64231c447 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -40,64 +40,64 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc .
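# The rewrite below is needed because cmd.exe does not treat a trailing
# backslash as a line continuation; its continuation character is the
# caret, so every "\" in the multi-line nosetests command becomes "^".
# A minimal sketch of the cmd.exe form (hypothetical command):
#     nosetests nibabel ^
#         -I test_scaling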
- nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \ - -I test_array_sequence \ - -I test_tractogram \ - -I test_api_validators \ - -I test_arrayproxy \ - -I test_arraywriters \ - -I test_batteryrunners \ - -I test_brikhead \ - -I test_casting \ - -I test_cifti2io_header \ - -I test_data \ - -I test_deprecated \ - -I test_deprecator \ - -I test_dft \ - -I test_ecat \ - -I test_ecat_data \ - -I test_endiancodes \ - -I test_environment \ - -I test_euler \ - -I test_filebasedimages \ - -I test_filehandles \ - -I test_fileholders \ - -I test_filename_parser \ - -I test_files_interface \ - -I test_fileslice \ - -I test_fileutils \ - -I test_floating \ - -I test_funcs \ - -I test_h5py_compat \ - -I test_image_api \ - -I test_image_load_save \ - -I test_image_types \ - -I test_imageclasses \ - -I test_imageglobals \ - -I test_keywordonly \ - -I test_loadsave \ - -I test_minc1 \ - -I test_minc2 \ - -I test_minc2_data \ - -I test_mriutils \ - -I test_nibabel_data \ - -I test_nifti1 \ - -I test_nifti2 \ - -I test_openers \ - -I test_optpkg \ - -I test_orientations \ - -I test_parrec \ - -I test_parrec_data \ - -I test_pkg_info \ - -I test_processing \ - -I test_proxy_api \ - -I test_quaternions \ - -I test_recoder \ - -I test_remmovalschedule \ - -I test_round_trip \ - -I test_rstutils \ - -I test_scaling \ - -I test_wrapstruct + nosetests --with-doctest --with-coverage --cover-package nibabel nibabel ^ + -I test_array_sequence ^ + -I test_tractogram ^ + -I test_api_validators ^ + -I test_arrayproxy ^ + -I test_arraywriters ^ + -I test_batteryrunners ^ + -I test_brikhead ^ + -I test_casting ^ + -I test_cifti2io_header ^ + -I test_data ^ + -I test_deprecated ^ + -I test_deprecator ^ + -I test_dft ^ + -I test_ecat ^ + -I test_ecat_data ^ + -I test_endiancodes ^ + -I test_environment ^ + -I test_euler ^ + -I test_filebasedimages ^ + -I test_filehandles ^ + -I test_fileholders ^ + -I test_filename_parser ^ + -I test_files_interface ^ + -I test_fileslice ^ + -I test_fileutils ^ + -I test_floating ^ + -I test_funcs ^ + -I test_h5py_compat ^ + -I test_image_api ^ + -I test_image_load_save ^ + -I test_image_types ^ + -I test_imageclasses ^ + -I test_imageglobals ^ + -I test_keywordonly ^ + -I test_loadsave ^ + -I test_minc1 ^ + -I test_minc2 ^ + -I test_minc2_data ^ + -I test_mriutils ^ + -I test_nibabel_data ^ + -I test_nifti1 ^ + -I test_nifti2 ^ + -I test_openers ^ + -I test_optpkg ^ + -I test_orientations ^ + -I test_parrec ^ + -I test_parrec_data ^ + -I test_pkg_info ^ + -I test_processing ^ + -I test_proxy_api ^ + -I test_quaternions ^ + -I test_recoder ^ + -I test_remmovalschedule ^ + -I test_round_trip ^ + -I test_rstutils ^ + -I test_scaling ^ + -I test_wrapstruct displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | From 406e6dc1adfe8f65158f425004209dcd93210225 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:28:50 -0500 Subject: [PATCH 524/689] Changed test_mghformat.py to pytest --- nibabel/freesurfer/tests/test_mghformat.py | 154 +++++++++++---------- 1 file changed, 79 insertions(+), 75 deletions(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 25a7254def..3d96aa60f6 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,12 +26,11 @@ import pytest -from numpy.testing import (assert_equal, assert_array_equal, - assert_array_almost_equal, assert_almost_equal, - assert_raises) -from 
...testing import assert_not_equal +from numpy.testing import (assert_array_equal, + assert_array_almost_equal, assert_almost_equal) -from ...testing import data_path + +from ...testing_pytest import data_path from ...tests import test_spatialimages as tsi from ...tests.test_wrapstruct import _TestLabeledWrapStruct @@ -68,10 +67,10 @@ def test_read_mgh(): # header h = mgz.header - assert_equal(h['version'], 1) - assert_equal(h['type'], 3) - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 3 + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [3, 4, 5, 2]) assert_almost_equal(h['tr'], 2.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -102,10 +101,10 @@ def test_write_mgh(): # Delete loaded image to allow file deletion by windows del mgz # header - assert_equal(h['version'], 1) - assert_equal(h['type'], 3) - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 3 + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [5, 4, 3, 2]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -132,10 +131,10 @@ def test_write_noaffine_mgh(): # Delete loaded image to allow file deletion by windows del mgz # header - assert_equal(h['version'], 1) - assert_equal(h['type'], 0) # uint8 for mgh - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 0 # uint8 for mgh + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [7, 13, 3, 22]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -158,7 +157,7 @@ def test_set_zooms(): (1, 1, -1, 1), (1, 1, 1, -1), (1, 1, 1, 1, 5)): - with assert_raises(HeaderDataError): + with pytest.raises(HeaderDataError): h.set_zooms(zooms) # smoke test for tr=0 h.set_zooms((1, 1, 1, 0)) @@ -178,7 +177,8 @@ def bad_dtype_mgh(): def test_bad_dtype_mgh(): # Now test the above function - assert_raises(MGHError, bad_dtype_mgh) + with pytest.raises(MGHError): + bad_dtype_mgh() def test_filename_exts(): @@ -219,14 +219,14 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert_equal(hdr['delta'], 1) + assert hdr['delta'] == 1 assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() mgz2 = _mgh_rt(mgz, img_fobj) hdr2 = mgz2.header assert_almost_equal(hdr2.get_affine(), exp_aff, 6) - assert_equal(hdr2['delta'], 1) + assert hdr2['delta'] == 1 # Change affine, change underlying header info exp_aff_d = exp_aff.copy() exp_aff_d[0, -1] = -14 @@ -259,17 +259,17 @@ def test_eq(): # Test headers compare properly hdr = MGHHeader() hdr2 = MGHHeader() - assert_equal(hdr, hdr2) + assert hdr == hdr2 hdr.set_data_shape((2, 3, 4)) assert(hdr != hdr2) hdr2.set_data_shape((2, 3, 4)) - assert_equal(hdr, hdr2) + assert hdr == hdr2 def test_header_slope_inter(): # Test placeholder slope / inter method hdr = MGHHeader() - assert_equal(hdr.get_slope_inter(), (None, None)) + assert hdr.get_slope_inter() == (None, None) def test_mgh_load_fileobj(): @@ -281,7 +281,7 @@ def test_mgh_load_fileobj(): # pass the filename to the array proxy, please feel free to change this # test. 
img = MGHImage.load(MGZ_FNAME) - assert_equal(img.dataobj.file_like, MGZ_FNAME) + assert img.dataobj.file_like == MGZ_FNAME # Check fileobj also passed into dataobj with ImageOpener(MGZ_FNAME) as fobj: contents = fobj.read() @@ -296,7 +296,7 @@ def test_mgh_affine_default(): hdr = MGHHeader() hdr['goodRASFlag'] = 0 hdr2 = MGHHeader(hdr.binaryblock) - assert_equal(hdr2['goodRASFlag'], 1) + assert hdr2['goodRASFlag'] == 1 assert_array_equal(hdr['Mdc'], hdr2['Mdc']) assert_array_equal(hdr['Pxyz_c'], hdr2['Pxyz_c']) @@ -311,33 +311,33 @@ def test_mgh_set_data_shape(): assert_array_equal(hdr.get_data_shape(), (5, 4, 3)) hdr.set_data_shape((5, 4, 3, 2)) assert_array_equal(hdr.get_data_shape(), (5, 4, 3, 2)) - with assert_raises(ValueError): + with pytest.raises(ValueError): hdr.set_data_shape((5, 4, 3, 2, 1)) def test_mghheader_default_structarr(): hdr = MGHHeader.default_structarr() - assert_equal(hdr['version'], 1) + assert hdr['version'] == 1 assert_array_equal(hdr['dims'], 1) - assert_equal(hdr['type'], 3) - assert_equal(hdr['dof'], 0) - assert_equal(hdr['goodRASFlag'], 1) + assert hdr['type'] == 3 + assert hdr['dof'] == 0 + assert hdr['goodRASFlag'] == 1 assert_array_equal(hdr['delta'], 1) assert_array_equal(hdr['Mdc'], [[-1, 0, 0], [0, 0, 1], [0, -1, 0]]) assert_array_equal(hdr['Pxyz_c'], 0) - assert_equal(hdr['tr'], 0) - assert_equal(hdr['flip_angle'], 0) - assert_equal(hdr['te'], 0) - assert_equal(hdr['ti'], 0) - assert_equal(hdr['fov'], 0) + assert hdr['tr'] ==0 + assert hdr['flip_angle'] == 0 + assert hdr['te'] == 0 + assert hdr['ti'] == 0 + assert hdr['fov'] == 0 for endianness in (None,) + BIG_CODES: hdr2 = MGHHeader.default_structarr(endianness=endianness) - assert_equal(hdr2, hdr) - assert_equal(hdr2.newbyteorder('>'), hdr) + assert hdr2 == hdr + assert hdr2.newbyteorder('>') == hdr for endianness in LITTLE_CODES: - with assert_raises(ValueError): + with pytest.raises(ValueError): MGHHeader.default_structarr(endianness=endianness) @@ -352,17 +352,17 @@ def test_deprecated_fields(): hdr['mrparams'] = [1, 2, 3, 4] assert_array_almost_equal(hdr['mrparams'], [1, 2, 3, 4]) - assert_equal(hdr['tr'], 1) - assert_equal(hdr['flip_angle'], 2) - assert_equal(hdr['te'], 3) - assert_equal(hdr['ti'], 4) - assert_equal(hdr['fov'], 0) + assert hdr['tr'] == 1 + assert hdr['flip_angle'] == 2 + assert hdr['te'] == 3 + assert hdr['ti'] == 4 + assert hdr['fov'] == 0 assert_array_almost_equal(hdr_data['mrparams'], [1, 2, 3, 4]) - assert_equal(hdr_data['tr'], 1) - assert_equal(hdr_data['flip_angle'], 2) - assert_equal(hdr_data['te'], 3) - assert_equal(hdr_data['ti'], 4) - assert_equal(hdr_data['fov'], 0) + assert hdr_data['tr'] == 1 + assert hdr_data['flip_angle'] == 2 + assert hdr_data['te'] == 3 + assert hdr_data['ti'] == 4 + assert hdr_data['fov'] == 0 hdr['tr'] = 5 hdr['flip_angle'] = 6 @@ -389,7 +389,7 @@ def check_dtypes(self, expected, actual): # Some images will want dtypes to be equal including endianness, # others may only require the same type # MGH requires the actual to be a big endian version of expected - assert_equal(expected.newbyteorder('>'), actual) + assert expected.newbyteorder('>') == actual class TestMGHHeader(_TestLabeledWrapStruct): @@ -406,9 +406,9 @@ def test_general_init(self): hdr = self.header_class() # binaryblock has length given by header data dtype binblock = hdr.binaryblock - assert_equal(len(binblock), hdr.structarr.dtype.itemsize) + assert len(binblock) == hdr.structarr.dtype.itemsize # Endianness will always be big, and cannot be set - assert_equal(hdr.endianness, 
'>') + assert hdr.endianness == '>' # You can also pass in a check flag, without data this has no # effect hdr = self.header_class(check=False) @@ -417,15 +417,15 @@ def test__eq__(self): # Test equal and not equal hdr1 = self.header_class() hdr2 = self.header_class() - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 self._set_something_into_hdr(hdr1) - assert_not_equal(hdr1, hdr2) + assert hdr1 != hdr2 self._set_something_into_hdr(hdr2) - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 # REMOVED as_byteswapped() test # Check comparing to funny thing says no - assert_not_equal(hdr1, None) - assert_not_equal(hdr1, 1) + assert hdr1 != None + assert hdr1 != 1 def test_to_from_fileobj(self): # Successful write using write_to @@ -434,56 +434,58 @@ def test_to_from_fileobj(self): hdr.write_to(str_io) str_io.seek(0) hdr2 = self.header_class.from_fileobj(str_io) - assert_equal(hdr2.endianness, '>') - assert_equal(hdr2.binaryblock, hdr.binaryblock) + assert hdr2.endianness == '>' + assert hdr2.binaryblock == hdr.binaryblock def test_endian_guess(self): # Check guesses of endian eh = self.header_class() - assert_equal(eh.endianness, '>') - assert_equal(self.header_class.guessed_endian(eh), '>') + assert eh.endianness == '>' + assert self.header_class.guessed_endian(eh) == '>' def test_bytes(self): # Test get of bytes hdr1 = self.header_class() bb = hdr1.binaryblock hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Do a set into the header, and try again. The specifics of 'setting # something' will depend on the nature of the bytes object self._set_something_into_hdr(hdr1) hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Short binaryblocks give errors (here set through init) # Long binaryblocks are truncated - assert_raises(WrapStructError, - self.header_class, - bb[:self.header_class._hdrdtype.itemsize - 1]) + with pytest.raises(WrapStructError): + self.header_class(bb[:self.header_class._hdrdtype.itemsize - 1]) + # Checking set to true by default, and prevents nonsense being # set into the header. 
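# (The check argument defaults to True in WrapStruct subclasses, so
# constructing from a corrupt binaryblock raises HeaderDataError unless
# check=False is passed, as the two constructions below exercise.)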
bb_bad = self.get_bad_bb() if bb_bad is None: return with imageglobals.LoggingOutputSuppressor(): - assert_raises(HeaderDataError, self.header_class, bb_bad) + with pytest.raises(HeaderDataError): + self.header_class(bb_bad) + # now slips past without check _ = self.header_class(bb_bad, check=False) def test_as_byteswapped(self): # Check byte swapping hdr = self.header_class() - assert_equal(hdr.endianness, '>') + assert hdr.endianness == '>' # same code just returns a copy for endianness in BIG_CODES: hdr2 = hdr.as_byteswapped(endianness) assert(hdr2 is not hdr) - assert_equal(hdr2, hdr) + assert hdr2 == hdr # Different code raises error for endianness in (None,) + LITTLE_CODES: - with assert_raises(ValueError): + with pytest.raises(ValueError): hdr.as_byteswapped(endianness) # Note that contents is not rechecked on swap / copy class DC(self.header_class): @@ -491,7 +493,9 @@ def check_fix(self, *args, **kwargs): raise Exception # Assumes check=True default - assert_raises(Exception, DC, hdr.binaryblock) + with pytest.raises(Exception): + DC(hdr.binaryblock) + hdr = DC(hdr.binaryblock, check=False) hdr2 = hdr.as_byteswapped('>') @@ -500,8 +504,8 @@ def test_checks(self): hdr_t = self.header_class() # _dxer just returns the diagnostics as a string # Default hdr is OK - assert_equal(self._dxer(hdr_t), '') + assert self._dxer(hdr_t) == '' # Version should be 1 hdr = hdr_t.copy() hdr['version'] = 2 - assert_equal(self._dxer(hdr), 'Unknown MGH format version') + assert self._dxer(hdr) == 'Unknown MGH format version' From e1bb16df9aea89f587070a9dd527991f46c6c491 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:55:23 -0500 Subject: [PATCH 525/689] small fixes to freesurfer test assertions --- nibabel/freesurfer/tests/test_io.py | 34 +++++++++++----------- nibabel/freesurfer/tests/test_mghformat.py | 3 +- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index cde2edeb5f..77a8bc12a0 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -51,18 +51,18 @@ def test_geometry(): """Test IO of .surf""" surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "inflated")) coords, faces = read_geometry(surf_path) - assert 0==faces.min() - assert coords.shape[0]== faces.max() + 1 + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "sphere")) coords, faces, volume_info, create_stamp = read_geometry( surf_path, read_metadata=True, read_stamp=True) assert 0 == faces.min() - assert coords.shape[0] == faces.max() + 1 + assert coords.shape[0] == (faces.max() + 1) assert 9 == len(volume_info) assert [2, 0, 20] == volume_info['head'] - assert 'created by greve on Thu Jun 8 19:17:51 2006' == create_stamp + assert create_stamp == 'created by greve on Thu Jun 8 19:17:51 2006' # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -121,7 +121,7 @@ def test_quad_geometry(): 'bert', 'surf', 'lh.inflated.nofix') coords, faces = read_geometry(new_quad) assert 0 == faces.min() - assert coords.shape[0] == faces.max() + 1 + assert coords.shape[0] == (faces.max() + 1) with InTemporaryDirectory(): new_path = 'test' write_geometry(new_path, coords, faces) @@ -135,8 +135,8 @@ def test_morph_data(): """Test IO of morphometry data file (eg.
curvature).""" curv_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "curv")) curv = read_morph_data(curv_path) - assert(-1.0 < curv.min() < 0) - assert(0 < curv.max() < 1.0) + assert -1.0 < curv.min() < 0 + assert 0 < curv.max() < 1.0 with InTemporaryDirectory(): new_path = 'test' write_morph_data(new_path, curv) @@ -175,8 +175,8 @@ def test_annot(): hash_ = _hash_file_content(annot_path) labels, ctab, names = read_annot(annot_path) - assert(labels.shape == (163842, )) - assert(ctab.shape == (len(names), 5)) + assert labels.shape == (163842, ) + assert ctab.shape == (len(names), 5) labels_orig = None if a == 'aparc': @@ -184,9 +184,9 @@ def test_annot(): np.testing.assert_array_equal(labels == -1, labels_orig == 0) # Handle different version of fsaverage if hash_ == 'bf0b488994657435cdddac5f107d21e8': - assert(np.sum(labels_orig == 0) == 13887) + assert np.sum(labels_orig == 0) == 13887 elif hash_ == 'd4f5b7cbc2ed363ac6fcf89e19353504': - assert(np.sum(labels_orig == 1639705) == 13327) + assert np.sum(labels_orig == 1639705) == 13327 else: raise RuntimeError("Unknown freesurfer file. Please report " "the problem to the maintainer of nibabel.") @@ -270,7 +270,7 @@ def test_write_annot_fill_ctab(): print(labels) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert( + assert ( any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path, orig_ids=True) @@ -346,13 +346,13 @@ def test_label(): label_path = pjoin(data_path, "label", "lh.cortex.label") label = read_label(label_path) # XXX : test more - assert(label.min() >= 0) - assert(label.max() <= 163841) - assert(label.shape[0] <= 163842) + assert label.min() >= 0 + assert label.max() <= 163841 + assert label.shape[0] <= 163842 labels, scalars = read_label(label_path, True) - assert(np.all(labels == label)) - assert(len(labels) == len(scalars)) + assert (np.all(labels == label)) + assert len(labels) == len(scalars) def test_write_annot_maxstruct(): diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 3d96aa60f6..37b2faa2b4 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,8 +26,7 @@ import pytest -from numpy.testing import (assert_array_equal, - assert_array_almost_equal, assert_almost_equal) +from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) from ...testing_pytest import data_path From e763c896f72b488e1163fb9450147d4de0de8805 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 21:57:25 -0500 Subject: [PATCH 526/689] TEST: Refactor NetCDF tests to be more pytest friendly --- .azure-pipelines/windows.yml | 1 + .travis.yml | 1 + nibabel/externals/tests/test_netcdf.py | 125 +++++++++++-------------- 3 files changed, 55 insertions(+), 72 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index e64231c447..543a672e9f 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -80,6 +80,7 @@ jobs: -I test_minc2 ^ -I test_minc2_data ^ -I test_mriutils ^ + -I test_netcdf ^ -I test_nibabel_data ^ -I test_nifti1 ^ -I test_nifti2 ^ diff --git a/.travis.yml b/.travis.yml index f8333d39a7..792e88a907 100644 --- a/.travis.yml +++ b/.travis.yml @@ -173,6 +173,7 @@ script: -I test_minc2 \ -I test_minc2_data \ -I test_mriutils \ + -I test_netcdf \ -I test_nibabel_data \ -I test_nifti1 \ 
-I test_nifti2 \ diff --git a/nibabel/externals/tests/test_netcdf.py b/nibabel/externals/tests/test_netcdf.py index 289e6791c1..f85393be4e 100644 --- a/nibabel/externals/tests/test_netcdf.py +++ b/nibabel/externals/tests/test_netcdf.py @@ -2,20 +2,15 @@ import os from os.path import join as pjoin, dirname -import shutil -import tempfile -import time -import sys from io import BytesIO from glob import glob from contextlib import contextmanager import numpy as np -from numpy.testing import dec, assert_ -from ..netcdf import netcdf_file +import pytest -from nose.tools import assert_true, assert_false, assert_equal, assert_raises +from ..netcdf import netcdf_file TEST_DATA_PATH = pjoin(dirname(__file__), 'data') @@ -36,54 +31,41 @@ def make_simple(*args, **kwargs): f.close() -def gen_for_simple(ncfileobj): - ''' Generator for example fileobj tests ''' - yield assert_equal, ncfileobj.history, b'Created for a test' +def assert_simple_truths(ncfileobj): + assert ncfileobj.history == b'Created for a test' time = ncfileobj.variables['time'] - yield assert_equal, time.units, b'days since 2008-01-01' - yield assert_equal, time.shape, (N_EG_ELS,) - yield assert_equal, time[-1], N_EG_ELS-1 - - -def test_read_write_files(): - # test round trip for example file - cwd = os.getcwd() - try: - tmpdir = tempfile.mkdtemp() - os.chdir(tmpdir) - with make_simple('simple.nc', 'w') as f: - pass - # To read the NetCDF file we just created:: - with netcdf_file('simple.nc') as f: - # Using mmap is the default - yield assert_true, f.use_mmap - for testargs in gen_for_simple(f): - yield testargs - - # Now without mmap - with netcdf_file('simple.nc', mmap=False) as f: - # Using mmap is the default - yield assert_false, f.use_mmap - for testargs in gen_for_simple(f): - yield testargs - - # To read the NetCDF file we just created, as file object, no - # mmap. When n * n_bytes(var_type) is not divisible by 4, this - # raised an error in pupynere 1.0.12 and scipy rev 5893, because - # calculated vsize was rounding up in units of 4 - see - # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html - fobj = open('simple.nc', 'rb') - with netcdf_file(fobj) as f: - # by default, don't use mmap for file-like - yield assert_false, f.use_mmap - for testargs in gen_for_simple(f): - yield testargs - except: - os.chdir(cwd) - shutil.rmtree(tmpdir) - raise - os.chdir(cwd) - shutil.rmtree(tmpdir) + assert time.units == b'days since 2008-01-01' + assert time.shape == (N_EG_ELS,) + assert time[-1] == N_EG_ELS - 1 + + +def test_read_write_files(tmp_path): + fname = str(tmp_path / 'simple.nc') + + with make_simple(fname, 'w') as f: + pass + # To read the NetCDF file we just created:: + with netcdf_file(fname) as f: + # Using mmap is the default + assert f.use_mmap + assert_simple_truths(f) + + # Now without mmap + with netcdf_file(fname, mmap=False) as f: + # Using mmap is the default + assert not f.use_mmap + assert_simple_truths(f) + + # To read the NetCDF file we just created, as file object, no + # mmap. 
When n * n_bytes(var_type) is not divisible by 4, this + # raised an error in pupynere 1.0.12 and scipy rev 5893, because + # calculated vsize was rounding up in units of 4 - see + # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html + fobj = open(fname, 'rb') + with netcdf_file(fobj) as f: + # by default, don't use mmap for file-like + assert not f.use_mmap + assert_simple_truths(f) def test_read_write_sio(): @@ -93,12 +75,12 @@ def test_read_write_sio(): eg_sio2 = BytesIO(str_val) with netcdf_file(eg_sio2) as f2: - for testargs in gen_for_simple(f2): - yield testargs + assert_simple_truths(f2) # Test that error is raised if attempting mmap for sio eg_sio3 = BytesIO(str_val) - yield assert_raises, ValueError, netcdf_file, eg_sio3, 'r', True + with pytest.raises(ValueError): + netcdf_file(eg_sio3, 'r', True) # Test 64-bit offset write / read eg_sio_64 = BytesIO() with make_simple(eg_sio_64, 'w', version=2) as f_64: @@ -106,15 +88,13 @@ def test_read_write_sio(): eg_sio_64 = BytesIO(str_val) with netcdf_file(eg_sio_64) as f_64: - for testargs in gen_for_simple(f_64): - yield testargs - yield assert_equal, f_64.version_byte, 2 + assert_simple_truths(f_64) + assert f_64.version_byte == 2 # also when version 2 explicitly specified eg_sio_64 = BytesIO(str_val) with netcdf_file(eg_sio_64, version=2) as f_64: - for testargs in gen_for_simple(f_64): - yield testargs - yield assert_equal, f_64.version_byte, 2 + assert_simple_truths(f_64) + assert f_64.version_byte == 2 def test_read_example_data(): @@ -134,7 +114,8 @@ def test_itemset_no_segfault_on_readonly(): time_var = f.variables['time'] # time_var.assignValue(42) should raise a RuntimeError--not seg. fault! - assert_raises(RuntimeError, time_var.assignValue, 42) + with pytest.raises(RuntimeError): + time_var.assignValue(42) def test_write_invalid_dtype(): @@ -147,14 +128,14 @@ def test_write_invalid_dtype(): with netcdf_file(BytesIO(), 'w') as f: f.createDimension('time', N_EG_ELS) for dt in dtypes: - yield assert_raises, ValueError, \ - f.createVariable, 'time', dt, ('time',) + with pytest.raises(ValueError): + f.createVariable('time', dt, ('time',)) def test_flush_rewind(): stream = BytesIO() with make_simple(stream, mode='w') as f: - x = f.createDimension('x',4) + x = f.createDimension('x', 4) v = f.createVariable('v', 'i2', ['x']) v[:] = 1 f.flush() @@ -162,7 +143,7 @@ def test_flush_rewind(): f.flush() len_double = len(stream.getvalue()) - assert_(len_single == len_double) + assert len_single == len_double def test_dtype_specifiers(): @@ -192,8 +173,8 @@ def test_ticket_1720(): io = BytesIO(contents) with netcdf_file(io, 'r') as f: - assert_equal(f.history, b'Created for a test') + assert f.history == b'Created for a test' float_var = f.variables['float_var'] - assert_equal(float_var.units, b'metres') - assert_equal(float_var.shape, (10,)) - assert_(np.allclose(float_var[:], items)) + assert float_var.units == b'metres' + assert float_var.shape == (10,) + assert np.allclose(float_var[:], items) From 61f6ecf197f9929de90e438fbbfbb2b6afafe6f1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 5 Feb 2020 10:36:04 -0500 Subject: [PATCH 527/689] CI: Resolve Travis warnings, remove redundant job --- .travis.yml | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index f8333d39a7..b2bc55eb80 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,11 +4,11 @@ # for it to be on multiple physical lines, so long as you remember: - There # can't be any leading "-"s - All newlines will be 
removed, so use ";"s +os: linux dist: xenial -sudo: true language: python - cache: pip + env: global: - SETUP_REQUIRES="pip setuptools>=30.3.0 wheel" @@ -26,7 +26,7 @@ python: - 3.7 - 3.8 -matrix: +jobs: include: # Old nosetests - Remove soon - python: 3.7 @@ -36,11 +36,6 @@ matrix: - python: 3.5 env: - DEPENDS="-r requirements.txt" - # Clean install - - python: 3.5 - env: - - DEPENDS="" - - CHECK_TYPE=skiptests # Absolute minimum dependencies - python: 3.5 env: @@ -110,7 +105,6 @@ install: fi # Basic import check - python -c 'import nibabel; print(nibabel.__version__)' - - if [ "$CHECK_TYPE" == "skiptests" ]; then exit 0; fi before_script: # Point to nibabel data directory From a411f338422e08b3f04efd061848a3b8bb87d084 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 22:11:13 -0500 Subject: [PATCH 528/689] TEST: Use skipif mark instead of TestMinc2API.__init__ --- nibabel/tests/test_image_api.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index ee8c287df2..c24afea2eb 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -712,12 +712,8 @@ class TestMinc1API(ImageHeaderAPI): example_images = MINC1_EXAMPLE_IMAGES +@pytest.mark.skipif(not have_h5py, reason="Need h5py for Minc2 tests") class TestMinc2API(TestMinc1API): - - def __init__(self): - if not have_h5py: - pytest.skip('Need h5py for these tests') - klass = image_maker = Minc2Image loader = minc2.load example_images = MINC2_EXAMPLE_IMAGES From e1da57c200577f07a085f464d3d9828ad6c1f123 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 22:11:55 -0500 Subject: [PATCH 529/689] TEST: Use standard syntax when possible --- nibabel/tests/test_image_api.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index c24afea2eb..738a3f1969 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -108,7 +108,7 @@ def validate_header(self, imaker, params): hdr = img.header # we can fetch it # Read only with pytest.raises(AttributeError): - setattr(img, 'header', hdr) + img.header = hdr def validate_header_deprecated(self, imaker, params): # Check deprecated header API @@ -164,9 +164,9 @@ def validate_filenames(self, imaker, params): def validate_no_slicing(self, imaker, params): img = imaker() with pytest.raises(TypeError): - img.__getitem__('string') + img['string'] with pytest.raises(TypeError): - img.__getitem__(slice(None)) + img[:] def validate_get_data_deprecated(self, imaker, params): # Check deprecated header API @@ -231,10 +231,10 @@ def validate_data_interface(self, imaker, params): # dataobj is read only fake_data = np.zeros(img.shape).astype(img.get_data_dtype()) with pytest.raises(AttributeError): - setattr(img, 'dataobj', fake_data) + img.dataobj = fake_data # So is in_memory with pytest.raises(AttributeError): - setattr(img, 'in_memory', False) + img.in_memory = False def _check_proxy_interface(self, imaker, meth_name): # Parameters assert this is an array proxy @@ -402,7 +402,7 @@ def validate_data_deprecated(self, imaker, params): # Check setting _data raises error fake_data = np.zeros(img.shape).astype(img.get_data_dtype()) with pytest.raises(AttributeError): - setattr(img, '_data', fake_data) + img._data = fake_data def validate_shape(self, imaker, params): # Validate shape @@ -414,7 +414,7 @@ def validate_shape(self, imaker, params): assert 
img.shape == params['data'].shape # Read only with pytest.raises(AttributeError): - setattr(img, 'shape', np.eye(4)) + img.shape = np.eye(4) def validate_ndim(self, imaker, params): # Validate shape @@ -426,7 +426,7 @@ def validate_ndim(self, imaker, params): assert img.ndim == params['data'].ndim # Read only with pytest.raises(AttributeError): - setattr(img, 'ndim', 5) + img.ndim = 5 def validate_shape_deprecated(self, imaker, params): # Check deprecated get_shape API @@ -497,7 +497,7 @@ def validate_affine(self, imaker, params): assert img.affine[0, 0] == 1.5 # Read only with pytest.raises(AttributeError): - setattr(img, 'affine', np.eye(4)) + img.affine = np.eye(4) def validate_affine_deprecated(self, imaker, params): # Check deprecated affine API From dc9d82e596239818f4fb92ad494f4ead06f08b69 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 22:17:59 -0500 Subject: [PATCH 530/689] TEST: Use relative imports in gifti tests --- nibabel/gifti/tests/test_gifti.py | 29 ++++++++++---------- nibabel/gifti/tests/test_parse_gifti_fast.py | 14 +++++----- 2 files changed, 21 insertions(+), 22 deletions(-) diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 8a0b3d327d..5cb70019d8 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -6,27 +6,26 @@ import numpy as np -import nibabel as nib -from nibabel.gifti import (GiftiImage, GiftiDataArray, GiftiLabel, - GiftiLabelTable, GiftiMetaData, GiftiNVPairs, - GiftiCoordSystem) -from nibabel.gifti.gifti import data_tag -from nibabel.nifti1 import data_type_codes -from nibabel.fileholders import FileHolder - -from numpy.testing import (assert_array_almost_equal, - assert_array_equal) +from ... import load +from .. import (GiftiImage, GiftiDataArray, GiftiLabel, + GiftiLabelTable, GiftiMetaData, GiftiNVPairs, + GiftiCoordSystem) +from ..gifti import data_tag +from ...nifti1 import data_type_codes +from ...fileholders import FileHolder + +from numpy.testing import assert_array_almost_equal, assert_array_equal import pytest -from nibabel.testing import clear_and_catch_warnings, test_data +from ...testing_pytest import clear_and_catch_warnings, test_data from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6) import itertools def test_agg_data(): - surf_gii_img = nib.load(test_data('gifti', 'ascii.gii')) - func_gii_img = nib.load(test_data('gifti', 'task.func.gii')) - shape_gii_img = nib.load(test_data('gifti', 'rh.shape.curv.gii')) + surf_gii_img = load(test_data('gifti', 'ascii.gii')) + func_gii_img = load(test_data('gifti', 'task.func.gii')) + shape_gii_img = load(test_data('gifti', 'rh.shape.curv.gii')) # add timeseries data with intent code ``none`` point_data = surf_gii_img.get_arrays_from_intent('pointset')[0].data @@ -296,7 +295,7 @@ def assign_rgba(gl, val): def test_print_summary(): for fil in [DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6]: - gimg = nib.load(fil) + gimg = load(fil) gimg.print_summary() diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 15a2c69f1c..5e8150ac64 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -13,17 +13,17 @@ import numpy as np -import nibabel.gifti as gi -from nibabel.gifti.util import gifti_endian_codes -from nibabel.gifti.parse_gifti_fast import Outputter, parse_gifti_file -from nibabel.loadsave import load, save -from 
nibabel.nifti1 import xform_codes -from nibabel.tmpdirs import InTemporaryDirectory +from .. import gifti as gi +from ..util import gifti_endian_codes +from ..parse_gifti_fast import Outputter, parse_gifti_file +from ...loadsave import load, save +from ...nifti1 import xform_codes +from ...tmpdirs import InTemporaryDirectory from numpy.testing import assert_array_almost_equal import pytest -from ...testing import clear_and_catch_warnings +from ...testing_pytest import clear_and_catch_warnings IO_DATA_PATH = pjoin(dirname(__file__), 'data') From 42beb1a02d4eaf37b61c4623351ac4b0fb0160bb Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 22:32:59 -0500 Subject: [PATCH 531/689] TEST: Simplify giftiio deprecation tests --- nibabel/gifti/tests/test_giftiio.py | 33 ++++++++--------------------- 1 file changed, 9 insertions(+), 24 deletions(-) diff --git a/nibabel/gifti/tests/test_giftiio.py b/nibabel/gifti/tests/test_giftiio.py index 87d28b00c4..8269618b0c 100644 --- a/nibabel/gifti/tests/test_giftiio.py +++ b/nibabel/gifti/tests/test_giftiio.py @@ -7,31 +7,16 @@ # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -import warnings +from ..giftiio import read, write +from .test_parse_gifti_fast import DATA_FILE1 -from nibabel.testing import clear_and_catch_warnings -from nibabel.tmpdirs import InTemporaryDirectory +import pytest -from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, - DATA_FILE4, DATA_FILE5, DATA_FILE6) - - -class TestGiftiIO(object): - - def setUp(self): - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) - assert len(w) == 1 - - -def test_read_deprecated(): - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) - from nibabel.gifti.giftiio import read, write - +def test_read_deprecated(tmp_path): + with pytest.deprecated_call(): img = read(DATA_FILE1) - assert len(w) == 1 - with InTemporaryDirectory(): - write(img, 'test.gii') - assert len(w) == 2 + + fname = tmp_path / 'test.gii' + with pytest.deprecated_call(): + write(img, fname) From 5d0f645dbcbfbb3f185032a16cc3bb297142901b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 4 Feb 2020 22:42:31 -0500 Subject: [PATCH 532/689] TEST: Combine imports --- nibabel/streamlines/tests/test_trk.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index f88631965e..736f61b820 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -8,8 +8,7 @@ from io import BytesIO import pytest -from ...testing_pytest import data_path -from ...testing_pytest import clear_and_catch_warnings, assert_arr_dict_equal +from ...testing_pytest import data_path, clear_and_catch_warnings, assert_arr_dict_equal from numpy.testing import assert_array_equal from .test_tractogram import assert_tractogram_equal From 979b4396a5b67a4b40ba6800a04a09f2f8790fe1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 5 Feb 2020 11:02:04 -0500 Subject: [PATCH 533/689] TEST: Skip giftiio tests in nose --- .azure-pipelines/windows.yml | 1 + .travis.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 543a672e9f..89b47b8345 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -68,6 +68,7 @@ jobs: -I test_fileutils ^ -I test_floating ^ -I test_funcs ^ + -I test_giftiio ^ -I test_h5py_compat ^ 
-I test_image_api ^ -I test_image_load_save ^ diff --git a/.travis.yml b/.travis.yml index c1e1ecc094..b869d33947 100644 --- a/.travis.yml +++ b/.travis.yml @@ -155,6 +155,7 @@ script: -I test_fileutils \ -I test_floating \ -I test_funcs \ + -I test_giftiio \ -I test_h5py_compat \ -I test_image_api \ -I test_image_load_save \ From 5fdeed9aadf9a397b211c1139b044095c16c1f25 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 09:53:26 -0500 Subject: [PATCH 534/689] converting more tests from nibabel.tests --- nibabel/tests/test_tripwire.py | 16 ++++++---------- nibabel/tests/test_viewers.py | 28 +++++++++++++++------------- 2 files changed, 21 insertions(+), 23 deletions(-) diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index 990f0bbf39..b69e913d4b 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -3,26 +3,22 @@ from ..tripwire import TripWire, is_tripwire, TripWireError -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) -import pytest; pytestmark = pytest.mark.skip() +import pytest def test_is_tripwire(): - assert_false(is_tripwire(object())) - assert_true(is_tripwire(TripWire('some message'))) + assert not is_tripwire(object()) + assert is_tripwire(TripWire('some message')) def test_tripwire(): # Test tripwire object silly_module_name = TripWire('We do not have silly_module_name') - assert_raises(TripWireError, - getattr, - silly_module_name, - 'do_silly_thing') + with pytest.raises(TripWireError): + getattr(silly_module_name, 'do_silly_thing') # Check AttributeError can be checked too try: silly_module_name.__wrapped__ except TripWireError as err: - assert_true(isinstance(err, AttributeError)) + assert isinstance(err, AttributeError) else: raise RuntimeError("No error raised, but expected") diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 0e3f076223..1be6e400a2 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -14,16 +14,13 @@ from ..optpkg import optional_package from ..viewers import OrthoSlicer3D -from ..testing import skipif from numpy.testing import assert_array_equal, assert_equal -from nose.tools import assert_raises, assert_true -import pytest; pytestmark = pytest.mark.skip() +import pytest # Need at least MPL 1.3 for viewer tests. 
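#  The matplotlib gate just below is nibabel's standard soft-dependency
#  idiom: ``optional_package`` returns a ``(module, have_flag,
#  setup_module)`` triple, and the flag feeds a module-level
#  ``pytest.mark.skipif`` instead of the old ``nibabel.testing.skipif``
#  helper being removed here. A minimal sketch of the same pattern,
#  assuming only that pytest and ``nibabel.optpkg`` are importable
#  (the h5py example is illustrative, not part of this diff):
#
#      import pytest
#      from nibabel.optpkg import optional_package
#
#      h5py, have_h5py, _ = optional_package('h5py')
#      needs_h5py = pytest.mark.skipif(not have_h5py,
#                                      reason='These tests need h5py')
#
#      @needs_h5py
#      def test_something_h5py():
#          assert h5py.File is not None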
matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.3') - -needs_mpl = skipif(not has_mpl, 'These tests need matplotlib') +needs_mpl = pytest.mark.skipif(not has_mpl, reason='These tests need matplotlib') if has_mpl: matplotlib.use('Agg') @@ -38,7 +35,7 @@ def test_viewer(): data = data * np.array([1., 2.]) # give it a # of volumes > 1 v = OrthoSlicer3D(data) assert_array_equal(v.position, (0, 0, 0)) - assert_true('OrthoSlicer3D' in repr(v)) + assert 'OrthoSlicer3D' in repr(v) # fake some events, inside and outside axes v._on_scroll(nt('event', 'button inaxes key')('up', None, None)) @@ -53,8 +50,10 @@ def test_viewer(): v.set_volume_idx(1) v.cmap = 'hot' v.clim = (0, 3) - assert_raises(ValueError, OrthoSlicer3D.clim.fset, v, (0.,)) # bad limits - assert_raises(ValueError, OrthoSlicer3D.cmap.fset, v, 'foo') # wrong cmap + with pytest.raises(ValueError): + OrthoSlicer3D.clim.fset(v, (0.,)) # bad limits + with pytest.raises(ValueError): + OrthoSlicer3D.cmap.fset(v, 'foo') # wrong cmap # decrement/increment volume numbers via keypress v.set_volume_idx(1) # should just pass @@ -76,8 +75,8 @@ def test_viewer(): v.close() # complex input should raise a TypeError prior to figure creation - assert_raises(TypeError, OrthoSlicer3D, - data[:, :, :, 0].astype(np.complex64)) + with pytest.raises(TypeError): + OrthoSlicer3D(data[:, :, :, 0].astype(np.complex64)) # other cases fig, axes = plt.subplots(1, 4) @@ -87,10 +86,13 @@ def test_viewer(): float) v2 = OrthoSlicer3D(data, affine=aff, axes=axes[:3]) # bad data (not 3+ dim) - assert_raises(ValueError, OrthoSlicer3D, data[:, :, 0, 0]) + with pytest.raises(ValueError): + OrthoSlicer3D(data[:, :, 0, 0]) # bad affine (not 4x4) - assert_raises(ValueError, OrthoSlicer3D, data, affine=np.eye(3)) - assert_raises(TypeError, v2.link_to, 1) + with pytest.raises(ValueError): + OrthoSlicer3D(data, affine=np.eye(3)) + with pytest.raises(TypeError): + v2.link_to(1) v2.link_to(v1) v2.link_to(v1) # shouldn't do anything v1.close() From 1a435fa521535e8f136fb29f99403d333bf873a0 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 535/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 198 +++++++++++++++++----------------- 1 file changed, 98 insertions(+), 100 deletions(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 5e7defdaa5..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -20,10 +20,8 @@ from ..loadsave import load from ..orientations import flip_axis, aff2axcodes, inv_ornt_aff -from nose.tools import assert_true, assert_false, assert_equal -from nose import SkipTest - -from numpy.testing import assert_almost_equal, assert_array_equal +import pytest +from numpy.testing import assert_almost_equal from .scriptrunner import ScriptRunner from .nibabel_data import needs_nibabel_data @@ -33,7 +31,6 @@ from .test_parrec_data import BALLS, AFF_OFF from ..testing_pytest import assert_data_similar -import pytest; pytestmark = pytest.mark.skip() def _proc_stdout(stdout): stdout_str = stdout.decode('latin1').strip() @@ -56,6 +53,14 @@ def script_test(func): DATA_PATH = abspath(pjoin(dirname(__file__), 'data')) +def load_small_file(): + try: + load(pjoin(DATA_PATH, 'small.mnc')) + return True + except: + return False + + def check_nib_ls_example4d(opts=[], hdrs_str="", other_str=""): # test nib-ls script fname = pjoin(DATA_PATH, 'example4d.nii.gz') @@ -64,7 +69,7 @@ def check_nib_ls_example4d(opts=[], 
hdrs_str="", other_str=""): % (hdrs_str, other_str)) cmd = ['nib-ls'] + opts + [fname] code, stdout, stderr = run_command(cmd) - assert_equal(fname, stdout[:len(fname)]) + assert fname == stdout[:len(fname)] assert_re_in(expected_re, stdout[len(fname):]) @@ -77,45 +82,45 @@ def check_nib_diff_examples(): "quatern_c", "quatern_d", "qoffset_x", "qoffset_y", "qoffset_z", "srow_x", "srow_y", "srow_z", "DATA(md5)", "DATA(diff 1:)"] for item in checked_fields: - assert_true(item in stdout) + assert item in stdout fnames2 = [pjoin(DATA_PATH, f) for f in ('example4d.nii.gz', 'example4d.nii.gz')] code, stdout, stderr = run_command(['nib-diff'] + fnames2, check_code=False) - assert_equal(stdout, "These files are identical.") + assert stdout == "These files are identical." fnames3 = [pjoin(DATA_PATH, f) for f in ('standard.nii.gz', 'example4d.nii.gz', 'example_nifti2.nii.gz')] code, stdout, stderr = run_command(['nib-diff'] + fnames3, check_code=False) for item in checked_fields: - assert_true(item in stdout) + assert item in stdout fnames4 = [pjoin(DATA_PATH, f) for f in ('standard.nii.gz', 'standard.nii.gz', 'standard.nii.gz')] code, stdout, stderr = run_command(['nib-diff'] + fnames4, check_code=False) - assert_equal(stdout, "These files are identical.") + assert stdout == "These files are identical." code, stdout, stderr = run_command(['nib-diff', '--dt', 'float64'] + fnames, check_code=False) for item in checked_fields: - assert_true(item in stdout) + assert item in stdout - -@script_test -def test_nib_ls(): - yield check_nib_ls_example4d - yield check_nib_ls_example4d, \ - ['-H', 'dim,bitpix'], " \[ 4 128 96 24 2 1 1 1\] 16" - yield check_nib_ls_example4d, ['-c'], "", " !1030 uniques. Use --all-counts" - yield check_nib_ls_example4d, ['-c', '--all-counts'], "", " 2:3 3:2 4:1 5:1.*" +@pytest.mark.parametrize("args", [ + [], + [['-H', 'dim,bitpix'], " \[ 4 128 96 24 2 1 1 1\] 16"], + [['-c'], "", " !1030 uniques. Use --all-counts"], + [['-c', '--all-counts'], "", " 2:3 3:2 4:1 5:1.*"], # both stats and counts - yield check_nib_ls_example4d, \ - ['-c', '-s', '--all-counts'], "", " \[229725\] \[2, 1.2e\+03\] 2:3 3:2 4:1 5:1.*" + [['-c', '-s', '--all-counts'], "", " \[229725\] \[2, 1.2e\+03\] 2:3 3:2 4:1 5:1.*"], # and must not error out if we allow for zeros - yield check_nib_ls_example4d, \ - ['-c', '-s', '-z', '--all-counts'], "", " \[589824\] \[0, 1.2e\+03\] 0:360099 2:3 3:2 4:1 5:1.*" + [['-c', '-s', '-z', '--all-counts'], "", " \[589824\] \[0, 1.2e\+03\] 0:360099 2:3 3:2 4:1 5:1.*"], +]) +@script_test +def test_nib_ls(args): + check_nib_ls_example4d(*args) +@pytest.mark.skipif(not load_small_file(), reason="can't load the small.mnc file") @script_test def test_nib_ls_multiple(): # verify that correctly lists/formats for multiple files @@ -126,42 +131,35 @@ def test_nib_ls_multiple(): ] code, stdout, stderr = run_command(['nib-ls'] + fnames) stdout_lines = stdout.split('\n') - assert_equal(len(stdout_lines), 4) - try: - load(pjoin(DATA_PATH, 'small.mnc')) - except: - raise SkipTest("For the other tests should be able to load MINC files") + assert len(stdout_lines) == 4 # they should be indented correctly. 
Since all files are int type - ln = max(len(f) for f in fnames) i_str = ' i' if sys.byteorder == 'little' else ' Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 536/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 529e103f46..453c0af9ec 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 9e6cac1432fad5cd22d6412831084c93277d0d06 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 537/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 54 ++-- nibabel/tests/test_spatialimages.py | 370 ++++++++++++++-------------- nibabel/tests/test_spm2analyze.py | 12 +- nibabel/tests/test_spm99analyze.py | 57 +++-- 4 files changed, 254 insertions(+), 239 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 119ecfd5c3..5459571954 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -9,13 +9,9 @@ from ..nifti1 import Nifti1Image from ..eulerangles import euler2mat +import pytest +from numpy.testing import assert_almost_equal -from numpy.testing import (assert_almost_equal, - assert_array_equal) - -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) -import pytest; pytestmark = pytest.mark.skip() def assert_all_in(in_shape, in_affine, out_shape, out_affine): slices = tuple(slice(N) for N in in_shape) @@ -30,8 +26,8 @@ def assert_all_in(in_shape, in_affine, out_shape, out_affine): v2v = new_v2v out_grid = apply_affine(v2v, in_grid) TINY = 1e-12 - assert_true(np.all(out_grid > -TINY)) - assert_true(np.all(out_grid < np.array(out_shape) + TINY)) + assert np.all(out_grid > -TINY) + assert np.all(out_grid < np.array(out_shape) + TINY) def get_outspace_params(): @@ -80,27 +76,32 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) - assert_array_equal(shape, (2, 3, 4)) - assert_array_equal(aff, np.eye(4)) + assert shape == (2, 3, 4) + assert (aff == np.eye(4)).all() for in_shape, in_aff, vox, out_shape, out_aff in get_outspace_params(): img = Nifti1Image(np.ones(in_shape), in_aff) for input in ((in_shape, in_aff), img): shape, aff = vox2out_vox(input, vox) assert_all_in(in_shape, in_aff, shape, aff) - assert_equal(shape, out_shape) + assert shape == out_shape assert_almost_equal(aff, out_aff) - assert_true(isinstance(shape, tuple)) - assert_true(isinstance(shape[0], int)) + assert isinstance(shape, tuple) + assert isinstance(shape[0], int) # Enforce number of axes - assert_raises(ValueError, vox2out_vox, ((2, 3, 4, 5), np.eye(4))) - assert_raises(ValueError, vox2out_vox, ((2, 3, 4, 5, 6), np.eye(4))) - # Voxel sizes must be positive - assert_raises(ValueError, vox2out_vox, ((2, 3, 4), np.eye(4), [-1, 1, 1])) - 
assert_raises(ValueError, vox2out_vox, ((2, 3, 4), np.eye(4), [1, 0, 1])) + with pytest.raises(ValueError): + vox2out_vox(arg_tuple) def test_slice2volume(): @@ -112,7 +113,14 @@ def test_slice2volume(): for val in (0, 5, 10): exp_aff = np.array(def_aff) exp_aff[axis, -1] = val - assert_array_equal(slice2volume(val, axis), exp_aff) - assert_raises(ValueError, slice2volume, -1, 0) - assert_raises(ValueError, slice2volume, 0, -1) - assert_raises(ValueError, slice2volume, 0, 3) + assert (slice2volume(val, axis) == exp_aff).all() + + +@pytest.mark.parametrize("args", [ + [-1, 0], + [0, -1], + [0, 3] +]) +def test_slice2volume_exception(args): + with pytest.raises(ValueError): + slice2volume(*args) \ No newline at end of file diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 5b4706bea1..288096638e 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -15,58 +15,59 @@ import numpy as np from io import BytesIO -from ..spatialimages import (SpatialHeader, SpatialImage, HeaderDataError, - Header, ImageDataError) +from ..spatialimages import SpatialHeader, SpatialImage, HeaderDataError, Header from ..imageclasses import spatial_axes_first +import pytest from unittest import TestCase -from nose.tools import (assert_true, assert_false, assert_equal, - assert_not_equal, assert_raises) -from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns +from numpy.testing import assert_array_almost_equal + +from ..testing_pytest import ( + bytesio_round_trip, + clear_and_catch_warnings, + suppress_warnings, + memmap_after_ufunc +) -from ..testing import (clear_and_catch_warnings, suppress_warnings, - memmap_after_ufunc) -from ..testing_pytest import bytesio_round_trip from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError from .. 
import load as top_load -import pytest; pytestmark = pytest.mark.skip() def test_header_init(): # test the basic header hdr = Header() - assert_equal(hdr.get_data_dtype(), np.dtype(np.float32)) - assert_equal(hdr.get_data_shape(), (0,)) - assert_equal(hdr.get_zooms(), (1.0,)) + assert hdr.get_data_dtype() == np.dtype(np.float32) + assert hdr.get_data_shape() == (0,) + assert hdr.get_zooms() == (1.0,) hdr = Header(np.float64) - assert_equal(hdr.get_data_dtype(), np.dtype(np.float64)) - assert_equal(hdr.get_data_shape(), (0,)) - assert_equal(hdr.get_zooms(), (1.0,)) + assert hdr.get_data_dtype() == np.dtype(np.float64) + assert hdr.get_data_shape() == (0,) + assert hdr.get_zooms() == (1.0,) hdr = Header(np.float64, shape=(1, 2, 3)) - assert_equal(hdr.get_data_dtype(), np.dtype(np.float64)) - assert_equal(hdr.get_data_shape(), (1, 2, 3)) - assert_equal(hdr.get_zooms(), (1.0, 1.0, 1.0)) + assert hdr.get_data_dtype() == np.dtype(np.float64) + assert hdr.get_data_shape() == (1, 2, 3) + assert hdr.get_zooms() == (1.0, 1.0, 1.0) hdr = Header(np.float64, shape=(1, 2, 3), zooms=None) - assert_equal(hdr.get_data_dtype(), np.dtype(np.float64)) - assert_equal(hdr.get_data_shape(), (1, 2, 3)) - assert_equal(hdr.get_zooms(), (1.0, 1.0, 1.0)) + assert hdr.get_data_dtype() == np.dtype(np.float64) + assert hdr.get_data_shape() == (1, 2, 3) + assert hdr.get_zooms() == (1.0, 1.0, 1.0) hdr = Header(np.float64, shape=(1, 2, 3), zooms=(3.0, 2.0, 1.0)) - assert_equal(hdr.get_data_dtype(), np.dtype(np.float64)) - assert_equal(hdr.get_data_shape(), (1, 2, 3)) - assert_equal(hdr.get_zooms(), (3.0, 2.0, 1.0)) + assert hdr.get_data_dtype() == np.dtype(np.float64) + assert hdr.get_data_shape() == (1, 2, 3) + assert hdr.get_zooms() == (3.0, 2.0, 1.0) def test_from_header(): # check from header class method. Note equality checks below, # equality methods used here too. 
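#  ``from_header`` is duck-typed rather than class-checked: any object
#  exposing ``get_data_dtype``, ``get_data_shape`` and ``get_zooms``
#  can be adapted, as the little ``C`` class further down exercises.
#  A free-standing sketch of the same idea, assuming the
#  ``SpatialHeader.from_header`` classmethod keeps this behaviour
#  (``MinimalHeader`` is a hypothetical name, not part of nibabel):
#
#      import numpy as np
#      from nibabel.spatialimages import SpatialHeader
#
#      class MinimalHeader:
#          def get_data_dtype(self): return np.dtype('i2')
#          def get_data_shape(self): return (2, 3, 4)
#          def get_zooms(self): return (1.0, 2.0, 3.0)
#
#      hdr = SpatialHeader.from_header(MinimalHeader())
#      assert hdr.get_data_shape() == (2, 3, 4)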
empty = Header.from_header() - assert_equal(Header(), empty) + assert Header() == empty empty = Header.from_header(None) - assert_equal(Header(), empty) + assert Header() == empty hdr = Header(np.float64, shape=(1, 2, 3), zooms=(3.0, 2.0, 1.0)) copy = Header.from_header(hdr) - assert_equal(hdr, copy) - assert_false(hdr is copy) + assert hdr == copy + assert not hdr is copy class C(object): @@ -76,25 +77,25 @@ def get_data_shape(self): return (5, 4, 3) def get_zooms(self): return (10.0, 9.0, 8.0) converted = Header.from_header(C()) - assert_true(isinstance(converted, Header)) - assert_equal(converted.get_data_dtype(), np.dtype('u2')) - assert_equal(converted.get_data_shape(), (5, 4, 3)) - assert_equal(converted.get_zooms(), (10.0, 9.0, 8.0)) + assert isinstance(converted, Header) + assert converted.get_data_dtype() == np.dtype('u2') + assert converted.get_data_shape() == (5, 4, 3) + assert converted.get_zooms() == (10.0, 9.0, 8.0) def test_eq(): hdr = Header() other = Header() - assert_equal(hdr, other) + assert hdr == other other = Header('u2') - assert_not_equal(hdr, other) + assert hdr != other other = Header(shape=(1, 2, 3)) - assert_not_equal(hdr, other) + assert hdr != other hdr = Header(shape=(1, 2)) other = Header(shape=(1, 2)) - assert_equal(hdr, other) + assert hdr == other other = Header(shape=(1, 2), zooms=(2.0, 3.0)) - assert_not_equal(hdr, other) + assert hdr != other def test_copy(): @@ -102,51 +103,50 @@ def test_copy(): hdr = Header(np.float64, shape=(1, 2, 3), zooms=(3.0, 2.0, 1.0)) hdr_copy = hdr.copy() hdr.set_data_shape((4, 5, 6)) - assert_equal(hdr.get_data_shape(), (4, 5, 6)) - assert_equal(hdr_copy.get_data_shape(), (1, 2, 3)) + assert hdr.get_data_shape() == (4, 5, 6) + assert hdr_copy.get_data_shape() == (1, 2, 3) hdr.set_zooms((4, 5, 6)) - assert_equal(hdr.get_zooms(), (4, 5, 6)) - assert_equal(hdr_copy.get_zooms(), (3, 2, 1)) + assert hdr.get_zooms() == (4, 5, 6) + assert hdr_copy.get_zooms() == (3, 2, 1) hdr.set_data_dtype(np.uint8) - assert_equal(hdr.get_data_dtype(), np.dtype(np.uint8)) - assert_equal(hdr_copy.get_data_dtype(), np.dtype(np.float64)) + assert hdr.get_data_dtype() == np.dtype(np.uint8) + assert hdr_copy.get_data_dtype() == np.dtype(np.float64) def test_shape_zooms(): hdr = Header() hdr.set_data_shape((1, 2, 3)) - assert_equal(hdr.get_data_shape(), (1, 2, 3)) - assert_equal(hdr.get_zooms(), (1.0, 1.0, 1.0)) + assert hdr.get_data_shape() == (1, 2, 3) + assert hdr.get_zooms() == (1.0, 1.0, 1.0) hdr.set_zooms((4, 3, 2)) - assert_equal(hdr.get_zooms(), (4.0, 3.0, 2.0)) + assert hdr.get_zooms() == (4.0, 3.0, 2.0) hdr.set_data_shape((1, 2)) - assert_equal(hdr.get_data_shape(), (1, 2)) - assert_equal(hdr.get_zooms(), (4.0, 3.0)) + assert hdr.get_data_shape() == (1, 2) + assert hdr.get_zooms() == (4.0, 3.0) hdr.set_data_shape((1, 2, 3)) - assert_equal(hdr.get_data_shape(), (1, 2, 3)) - assert_equal(hdr.get_zooms(), (4.0, 3.0, 1.0)) + assert hdr.get_data_shape() == (1, 2, 3) + assert hdr.get_zooms() == (4.0, 3.0, 1.0) # null shape is (0,) hdr.set_data_shape(()) - assert_equal(hdr.get_data_shape(), (0,)) - assert_equal(hdr.get_zooms(), (1.0,)) + assert hdr.get_data_shape() == (0,) + assert hdr.get_zooms() == (1.0,) # zooms of wrong lengths raise error - assert_raises(HeaderDataError, hdr.set_zooms, (4.0, 3.0)) - assert_raises(HeaderDataError, - hdr.set_zooms, - (4.0, 3.0, 2.0, 1.0)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((4.0, 3.0)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((4.0, 3.0, 2.0, 1.0)) # as do negative zooms - 
assert_raises(HeaderDataError, - hdr.set_zooms, - (4.0, 3.0, -2.0)) + with pytest.raises(HeaderDataError): + hdr.set_zooms((4.0, 3.0, -2.0)) def test_data_dtype(): hdr = Header() - assert_equal(hdr.get_data_dtype(), np.dtype(np.float32)) + assert hdr.get_data_dtype() == np.dtype(np.float32) hdr.set_data_dtype(np.float64) - assert_equal(hdr.get_data_dtype(), np.dtype(np.float64)) + assert hdr.get_data_dtype() == np.dtype(np.float64) hdr.set_data_dtype('u2') - assert_equal(hdr.get_data_dtype(), np.dtype(np.uint16)) + assert hdr.get_data_dtype() == np.dtype(np.uint16) def test_affine(): @@ -162,8 +162,7 @@ def test_affine(): [0, 2, 0, -1], [0, 0, 1, -1], [0, 0, 0, 1]]) - assert_array_equal(hdr.get_base_affine(), - hdr.get_best_affine()) + assert (hdr.get_base_affine() == hdr.get_best_affine()).all() def test_read_data(): @@ -174,22 +173,22 @@ class CHeader(SpatialHeader): fobj = BytesIO() data = np.arange(6).reshape((1, 2, 3)) hdr.data_to_fileobj(data, fobj) - assert_equal(fobj.getvalue(), + assert (fobj.getvalue() == data.astype(np.int32).tostring(order=order)) # data_to_fileobj accepts kwarg 'rescale', but no effect in this case fobj.seek(0) hdr.data_to_fileobj(data, fobj, rescale=True) - assert_equal(fobj.getvalue(), + assert (fobj.getvalue() == data.astype(np.int32).tostring(order=order)) # data_to_fileobj can be a list fobj.seek(0) hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) - assert_equal(fobj.getvalue(), + assert (fobj.getvalue() == data.astype(np.int32).tostring(order=order)) # Read data back again fobj.seek(0) data2 = hdr.data_from_fileobj(fobj) - assert_array_equal(data, data2) + assert (data == data2).all() class DataLike(object): @@ -211,33 +210,33 @@ def test_isolation(self): arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) aff = np.eye(4) img = img_klass(arr, aff) - assert_array_equal(img.affine, aff) + assert (img.affine == aff).all() aff[0, 0] = 99 - assert_false(np.all(img.affine == aff)) + assert not np.all(img.affine == aff) # header, created by image creation ihdr = img.header # Pass it back in img = img_klass(arr, aff, ihdr) # Check modifying header outside does not modify image ihdr.set_zooms((4, 5, 6)) - assert_not_equal(img.header, ihdr) + assert img.header != ihdr def test_float_affine(self): # Check affines get converted to float img_klass = self.image_class arr = np.arange(3, dtype=np.int16) img = img_klass(arr, np.eye(4, dtype=np.float32)) - assert_equal(img.affine.dtype, np.dtype(np.float64)) + assert img.affine.dtype == np.dtype(np.float64) img = img_klass(arr, np.eye(4, dtype=np.int16)) - assert_equal(img.affine.dtype, np.dtype(np.float64)) + assert img.affine.dtype == np.dtype(np.float64) def test_images(self): # Assumes all possible images support int16 # See https://github.com/nipy/nibabel/issues/58 arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) img = self.image_class(arr, None) - assert_array_equal(img.get_fdata(), arr) - assert_equal(img.affine, None) + assert (img.get_fdata() == arr).all() + assert img.affine == None def test_default_header(self): # Check default header is as expected @@ -246,21 +245,21 @@ def test_default_header(self): hdr = self.image_class.header_class() hdr.set_data_shape(arr.shape) hdr.set_data_dtype(arr.dtype) - assert_equal(img.header, hdr) + assert img.header == hdr def test_data_api(self): # Test minimal api data object can initialize img = self.image_class(DataLike(), None) # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_array_equal(img.get_fdata().flatten(), 
np.arange(3)) - assert_equal(img.shape[:1], (3,)) - assert_equal(np.prod(img.shape), 3) + assert (img.get_fdata().flatten() == np.arange(3)).all() + assert img.shape[:1] == (3,) + assert np.prod(img.shape) == 3 def check_dtypes(self, expected, actual): # Some images will want dtypes to be equal including endianness, # others may only require the same type - assert_equal(expected, actual) + assert expected == actual def test_data_default(self): # check that the default dtype comes from the data if the header @@ -285,10 +284,10 @@ def test_data_shape(self): img = img_klass(arr, np.eye(4)) # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_equal(img.shape[:1], (4,)) - assert_equal(np.prod(img.shape), 4) + assert img.shape[:1] == (4,) + assert np.prod(img.shape) == 4 img = img_klass(np.zeros((2, 3, 4), dtype=np.float32), np.eye(4)) - assert_equal(img.shape, (2, 3, 4)) + assert img.shape == (2, 3, 4) def test_str(self): # Check something comes back from string representation @@ -297,13 +296,13 @@ def test_str(self): # See https://github.com/nipy/nibabel/issues/58 arr = np.arange(5, dtype=np.int16) img = img_klass(arr, np.eye(4)) - assert_true(len(str(img)) > 0) + assert len(str(img)) > 0 # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_equal(img.shape[:1], (5,)) - assert_equal(np.prod(img.shape), 5) + assert img.shape[:1] == (5,) + assert np.prod(img.shape) == 5 img = img_klass(np.zeros((2, 3, 4), dtype=np.int16), np.eye(4)) - assert_true(len(str(img)) > 0) + assert len(str(img)) > 0 def test_get_shape(self): # Check that get_shape raises an ExpiredDeprecationError @@ -311,7 +310,7 @@ def test_get_shape(self): # Assumes all possible images support int16 # See https://github.com/nipy/nibabel/issues/58 img = img_klass(np.arange(1, dtype=np.int16), np.eye(4)) - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): img.get_shape() def test_get_fdata(self): @@ -320,55 +319,57 @@ def test_get_fdata(self): in_data_template = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) in_data = in_data_template.copy() img = img_klass(in_data, None) - assert_true(in_data is img.dataobj) + assert in_data is img.dataobj # The get_fdata method changes the array to floating point type - assert_equal(img.get_fdata(dtype='f4').dtype, np.dtype(np.float32)) + assert img.get_fdata(dtype='f4').dtype == np.dtype(np.float32) fdata_32 = img.get_fdata(dtype=np.float32) - assert_equal(fdata_32.dtype, np.dtype(np.float32)) + assert fdata_32.dtype == np.dtype(np.float32) # Caching is specific to data dtype. 
If we reload with default data # type, the cache gets reset fdata_32[:] = 99 # Cache has been modified, we pick up the modifications, but only for # the cached data type - assert_array_equal(img.get_fdata(dtype='f4'), 99) + assert (img.get_fdata(dtype='f4') == 99).all() fdata_64 = img.get_fdata() - assert_equal(fdata_64.dtype, np.dtype(np.float64)) - assert_array_equal(fdata_64, in_data) + assert fdata_64.dtype == np.dtype(np.float64) + assert (fdata_64 == in_data).all() fdata_64[:] = 101 - assert_array_equal(img.get_fdata(dtype='f8'), 101) - assert_array_equal(img.get_fdata(), 101) + assert (img.get_fdata(dtype='f8') == 101).all() + assert (img.get_fdata() == 101).all() # Reloading with new data type blew away the float32 cache - assert_array_equal(img.get_fdata(dtype='f4'), in_data) + assert (img.get_fdata(dtype='f4') == in_data).all() img.uncache() # Now recaching, is float64 out_data = img.get_fdata() - assert_equal(out_data.dtype, np.dtype(np.float64)) + assert out_data.dtype == np.dtype(np.float64) # Input dtype needs to be floating point - assert_raises(ValueError, img.get_fdata, dtype=np.int16) - assert_raises(ValueError, img.get_fdata, dtype=np.int32) + with pytest.raises(ValueError): + img.get_fdata(dtype=np.int16) + with pytest.raises(ValueError): + img.get_fdata(dtype=np.int32) # The cache is filled out_data[:] = 42 - assert_true(img.get_fdata() is out_data) + assert img.get_fdata() is out_data img.uncache() - assert_false(img.get_fdata() is out_data) + assert not img.get_fdata() is out_data # The 42 has gone now. - assert_array_equal(img.get_fdata(), in_data_template) + assert (img.get_fdata() == in_data_template).all() # If we can save, we can create a proxy image if not self.can_save: return rt_img = bytesio_round_trip(img) - assert_false(in_data is rt_img.dataobj) - assert_array_equal(rt_img.dataobj, in_data) + assert not in_data is rt_img.dataobj + assert (rt_img.dataobj == in_data).all() out_data = rt_img.get_fdata() - assert_array_equal(out_data, in_data) - assert_false(rt_img.dataobj is out_data) - assert_equal(out_data.dtype, np.dtype(np.float64)) + assert (out_data == in_data).all() + assert not rt_img.dataobj is out_data + assert out_data.dtype == np.dtype(np.float64) # cache - assert_true(rt_img.get_fdata() is out_data) + assert rt_img.get_fdata() is out_data out_data[:] = 42 rt_img.uncache() - assert_false(rt_img.get_fdata() is out_data) - assert_array_equal(rt_img.get_fdata(), in_data) + assert not rt_img.get_fdata() is out_data + assert (rt_img.get_fdata() == in_data).all() def test_get_data(self): # Test array image and proxy image interface @@ -377,41 +378,41 @@ def test_get_data(self): in_data = in_data_template.copy() img = img_klass(in_data, None) # Can't slice into the image object: - with assert_raises(TypeError) as exception_manager: + with pytest.raises(TypeError) as exception_manager: img[0, 0, 0] # Make sure the right message gets raised: - assert_equal(str(exception_manager.exception), + assert (str(exception_manager.value) == "Cannot slice image objects; consider using " "`img.slicer[slice]` to generate a sliced image (see " "documentation for caveats) or slicing image array data " "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") - assert_true(in_data is img.dataobj) - with assert_warns(DeprecationWarning): + assert in_data is img.dataobj + with pytest.warns(DeprecationWarning): out_data = img.get_data() - assert_true(in_data is out_data) + assert in_data is out_data # and that uncache has no effect img.uncache() - assert_true(in_data is 
out_data) - assert_array_equal(out_data, in_data_template) + assert in_data is out_data + assert (out_data == in_data_template).all() # If we can save, we can create a proxy image if not self.can_save: return rt_img = bytesio_round_trip(img) - assert_false(in_data is rt_img.dataobj) - assert_array_equal(rt_img.dataobj, in_data) - with assert_warns(DeprecationWarning): + assert not in_data is rt_img.dataobj + assert (rt_img.dataobj == in_data).all() + with pytest.warns(DeprecationWarning): out_data = rt_img.get_data() - assert_array_equal(out_data, in_data) - assert_false(rt_img.dataobj is out_data) + assert (out_data == in_data).all() + assert not rt_img.dataobj is out_data # cache - with assert_warns(DeprecationWarning): - assert_true(rt_img.get_data() is out_data) + with pytest.warns(DeprecationWarning): + assert rt_img.get_data() is out_data out_data[:] = 42 rt_img.uncache() - with assert_warns(DeprecationWarning): - assert_false(rt_img.get_data() is out_data) - with assert_warns(DeprecationWarning): - assert_array_equal(rt_img.get_data(), in_data) + with pytest.warns(DeprecationWarning): + assert not rt_img.get_data() is out_data + with pytest.warns(DeprecationWarning): + assert (rt_img.get_data() == in_data).all() def test_slicer(self): img_klass = self.image_class @@ -424,11 +425,11 @@ def test_slicer(self): img = img_klass(in_data, base_affine.copy()) if not spatial_axes_first(img): - with assert_raises(ValueError): + with pytest.raises(ValueError): img.slicer continue - assert_true(hasattr(img.slicer, '__getitem__')) + assert hasattr(img.slicer, '__getitem__') # Note spatial zooms are always first 3, even when spatial_zooms = img.header.get_zooms()[:3] @@ -437,49 +438,49 @@ def test_slicer(self): sliceobj = [slice(None, None, 2)] * 3 + \ [slice(None)] * (len(dshape) - 3) downsampled_img = img.slicer[tuple(sliceobj)] - assert_array_equal(downsampled_img.header.get_zooms()[:3], - np.array(spatial_zooms) * 2) + assert (downsampled_img.header.get_zooms()[:3] + == np.array(spatial_zooms) * 2).all() max4d = (hasattr(img.header, '_structarr') and 'dims' in img.header._structarr.dtype.fields and img.header._structarr['dims'].shape == (4,)) # Check newaxis and single-slice errors - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[None] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[0] # Axes 1 and 2 are always spatial - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:, None] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:, 0] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:, :, None] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:, :, 0] if len(img.shape) == 4: if max4d: - with assert_raises(ValueError): + with pytest.raises(ValueError): img.slicer[:, :, :, None] else: # Reorder non-spatial axes - assert_equal(img.slicer[:, :, :, None].shape, + assert (img.slicer[:, :, :, None].shape == img.shape[:3] + (1,) + img.shape[3:]) # 4D to 3D using ellipsis or slices - assert_equal(img.slicer[..., 0].shape, img.shape[:-1]) - assert_equal(img.slicer[:, :, :, 0].shape, img.shape[:-1]) + assert img.slicer[..., 0].shape == img.shape[:-1] + assert img.slicer[:, :, :, 0].shape == img.shape[:-1] else: # 3D Analyze/NIfTI/MGH to 4D - assert_equal(img.slicer[:, :, :, None].shape, img.shape + (1,)) + assert img.slicer[:, :, :, None].shape == img.shape + (1,) if len(img.shape) == 3: # Slices exceed dimensions - with 
assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:, :, :, :, None] elif max4d: - with assert_raises(ValueError): + with pytest.raises(ValueError): img.slicer[:, :, :, :, None] else: - assert_equal(img.slicer[:, :, :, :, None].shape, + assert (img.slicer[:, :, :, :, None].shape == img.shape + (1,)) # Crop by one voxel in each dimension @@ -489,35 +490,35 @@ def test_slicer(self): sliced_ijk = img.slicer[1:, 1:, 1:] # No scaling change - assert_array_equal(sliced_i.affine[:3, :3], img.affine[:3, :3]) - assert_array_equal(sliced_j.affine[:3, :3], img.affine[:3, :3]) - assert_array_equal(sliced_k.affine[:3, :3], img.affine[:3, :3]) - assert_array_equal(sliced_ijk.affine[:3, :3], img.affine[:3, :3]) + assert (sliced_i.affine[:3, :3] == img.affine[:3, :3]).all() + assert (sliced_j.affine[:3, :3] == img.affine[:3, :3]).all() + assert (sliced_k.affine[:3, :3] == img.affine[:3, :3]).all() + assert (sliced_ijk.affine[:3, :3] == img.affine[:3, :3]).all() # Translation - assert_array_equal(sliced_i.affine[:, 3], [1, 0, 0, 1]) - assert_array_equal(sliced_j.affine[:, 3], [0, 1, 0, 1]) - assert_array_equal(sliced_k.affine[:, 3], [0, 0, 1, 1]) - assert_array_equal(sliced_ijk.affine[:, 3], [1, 1, 1, 1]) + assert (sliced_i.affine[:, 3] == [1, 0, 0, 1]).all() + assert (sliced_j.affine[:, 3] == [0, 1, 0, 1]).all() + assert (sliced_k.affine[:, 3] == [0, 0, 1, 1]).all() + assert (sliced_ijk.affine[:, 3] == [1, 1, 1, 1]).all() # No change to affines with upper-bound slices - assert_array_equal(img.slicer[:1, :1, :1].affine, img.affine) + assert (img.slicer[:1, :1, :1].affine == img.affine).all() # Yell about step = 0 - with assert_raises(ValueError): + with pytest.raises(ValueError): img.slicer[:, ::0] - with assert_raises(ValueError): + with pytest.raises(ValueError): img.slicer.slice_affine((slice(None), slice(None, None, 0))) # Don't permit zero-length slices - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[:0] # No fancy indexing - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[[0]] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[[-1]] - with assert_raises(IndexError): + with pytest.raises(IndexError): img.slicer[[0], [-1]] # Check data is consistent with slicing numpy arrays @@ -534,14 +535,14 @@ def test_slicer(self): pass else: sliced_data = in_data[sliceobj] - with assert_warns(DeprecationWarning): - assert_array_equal(sliced_data, sliced_img.get_data()) - assert_array_equal(sliced_data, sliced_img.get_fdata()) - assert_array_equal(sliced_data, sliced_img.dataobj) - assert_array_equal(sliced_data, img.dataobj[sliceobj]) - with assert_warns(DeprecationWarning): - assert_array_equal(sliced_data, img.get_data()[sliceobj]) - assert_array_equal(sliced_data, img.get_fdata()[sliceobj]) + with pytest.warns(DeprecationWarning): + assert (sliced_data == sliced_img.get_data()).all() + assert (sliced_data == sliced_img.get_fdata()).all() + assert (sliced_data == sliced_img.dataobj).all() + assert (sliced_data == img.dataobj[sliceobj]).all() + with pytest.warns(DeprecationWarning): + assert (sliced_data == img.get_data()[sliceobj]).all() + assert (sliced_data == img.get_fdata()[sliceobj]).all() def test_api_deprecations(self): @@ -563,13 +564,13 @@ def from_file_map(self, file_map=None): bio = BytesIO() file_map = FakeImage.make_file_map({'image': bio}) - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): img.to_files(file_map) - with 
assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): img.to_filespec('an_image') - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): FakeImage.from_files(file_map) - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): FakeImage.filespec_to_files('an_image') @@ -634,19 +635,20 @@ def test_load_mmap(self): back_img = func(param1, **kwargs) back_data = np.asanyarray(back_img.dataobj) if expected_mode is None: - assert_false(isinstance(back_data, np.memmap), - 'Should not be a %s' % img_klass.__name__) + assert not isinstance(back_data, np.memmap), 'Should not be a %s' % img_klass.__name__ else: - assert_true(isinstance(back_data, np.memmap), - 'Not a %s' % img_klass.__name__) + assert isinstance(back_data, np.memmap), 'Not a %s' % img_klass.__name__ if self.check_mmap_mode: - assert_equal(back_data.mode, expected_mode) + assert back_data.mode == expected_mode del back_img, back_data # Check that mmap is keyword-only - assert_raises(TypeError, func, param1, True) + with pytest.raises(TypeError): + func(param1, True) # Check invalid values raise error - assert_raises(ValueError, func, param1, mmap='rw') - assert_raises(ValueError, func, param1, mmap='r+') + with pytest.raises(ValueError): + func(param1, mmap='rw') + with pytest.raises(ValueError): + func(param1, mmap='r+') def test_header_deprecated(): @@ -655,7 +657,7 @@ def test_header_deprecated(): class MyHeader(Header): pass - assert_equal(len(w), 0) + assert len(w) == 0 MyHeader() - assert_equal(len(w), 1) + assert len(w) == 1 diff --git a/nibabel/tests/test_spm2analyze.py b/nibabel/tests/test_spm2analyze.py index ddf2956f7d..a88d3cafd4 100644 --- a/nibabel/tests/test_spm2analyze.py +++ b/nibabel/tests/test_spm2analyze.py @@ -13,19 +13,18 @@ from ..spatialimages import HeaderTypeError, HeaderDataError from ..spm2analyze import Spm2AnalyzeHeader, Spm2AnalyzeImage +import pytest from numpy.testing import assert_array_equal -from ..testing import assert_equal, assert_raises from . 
import test_spm99analyze -import pytest; pytestmark = pytest.mark.skip() class TestSpm2AnalyzeHeader(test_spm99analyze.TestSpm99AnalyzeHeader): header_class = Spm2AnalyzeHeader def test_slope_inter(self): hdr = self.header_class() - assert_equal(hdr.get_slope_inter(), (1.0, 0.0)) + assert hdr.get_slope_inter() == (1.0, 0.0) for in_tup, exp_err, out_tup, raw_slope in ( ((2.0,), None, (2.0, 0.), 2.), ((None,), None, (None, None), np.nan), @@ -43,16 +42,17 @@ def test_slope_inter(self): ((None, 0.0), None, (None, None), np.nan)): hdr = self.header_class() if not exp_err is None: - assert_raises(exp_err, hdr.set_slope_inter, *in_tup) + with pytest.raises(exp_err): + hdr.set_slope_inter(*in_tup) # raw set if not in_tup[0] is None: hdr['scl_slope'] = in_tup[0] else: hdr.set_slope_inter(*in_tup) - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup # Check set survives through checking hdr = Spm2AnalyzeHeader.from_header(hdr, check=True) - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup assert_array_equal(hdr['scl_slope'], raw_slope) diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index ecc63e5935..e1d559878a 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -12,13 +12,15 @@ from io import BytesIO -from numpy.testing import assert_array_equal, assert_array_almost_equal, dec +from numpy.testing import assert_array_equal, assert_array_almost_equal +import pytest # Decorator to skip tests requiring save / load if scipy not available for mat # files from ..optpkg import optional_package _, have_scipy, _ = optional_package('scipy') -scipy_skip = dec.skipif(not have_scipy, 'scipy not available') + +scipy_skip = pytest.mark.skipif(not have_scipy, reason='scipy not available') from ..spm99analyze import (Spm99AnalyzeHeader, Spm99AnalyzeImage, HeaderTypeError) @@ -26,13 +28,14 @@ from ..volumeutils import apply_read_scaling, _dt_min_max from ..spatialimages import supported_np_types, HeaderDataError -from nose.tools import assert_true, assert_false, assert_equal, assert_raises - -from ..testing import assert_allclose_safely, suppress_warnings -from ..testing_pytest import bytesio_round_trip, bytesio_filemap +from ..testing_pytest import ( + bytesio_round_trip, + bytesio_filemap, + assert_allclose_safely, + suppress_warnings +) from . 
import test_analyze -import pytest; pytestmark = pytest.mark.skip() FLOAT_TYPES = np.sctypes['float'] COMPLEX_TYPES = np.sctypes['complex'] @@ -62,7 +65,7 @@ def test_data_scaling(self): # almost equal assert_array_almost_equal(data, data_back, 4) # But not quite - assert_false(np.all(data == data_back)) + assert not np.all(data == data_back) # This is exactly the same call, just testing it works twice data_back2 = hdr.data_from_fileobj(S3) assert_array_equal(data_back, data_back2, 4) @@ -70,12 +73,12 @@ def test_data_scaling(self): hdr.data_to_fileobj(data, S3, rescale=True) data_back = hdr.data_from_fileobj(S3) assert_array_almost_equal(data, data_back, 4) - assert_false(np.all(data == data_back)) + assert not np.all(data == data_back) # This doesn't use scaling, and so gets perfect precision with np.errstate(invalid='ignore'): hdr.data_to_fileobj(data, S3, rescale=False) data_back = hdr.data_from_fileobj(S3) - assert_true(np.all(data == data_back)) + assert np.all(data == data_back) class TestSpm99AnalyzeHeader(test_analyze.TestAnalyzeHeader, @@ -85,7 +88,7 @@ class TestSpm99AnalyzeHeader(test_analyze.TestAnalyzeHeader, def test_empty(self): super(TestSpm99AnalyzeHeader, self).test_empty() hdr = self.header_class() - assert_equal(hdr['scl_slope'], 1) + assert hdr['scl_slope'] == 1 def test_big_scaling(self): # Test that upcasting works for huge scalefactors @@ -99,11 +102,11 @@ def test_big_scaling(self): data = np.array([type_info(dtt)['max']], dtype=dtt)[:, None, None] hdr.data_to_fileobj(data, sio) data_back = hdr.data_from_fileobj(sio) - assert_true(np.allclose(data, data_back)) + assert np.allclose(data, data_back) def test_slope_inter(self): hdr = self.header_class() - assert_equal(hdr.get_slope_inter(), (1.0, None)) + assert hdr.get_slope_inter() == (1.0, None) for in_tup, exp_err, out_tup, raw_slope in ( ((2.0,), None, (2.0, None), 2.), ((None,), None, (None, None), np.nan), @@ -121,16 +124,17 @@ def test_slope_inter(self): ((None, 0.0), None, (None, None), np.nan)): hdr = self.header_class() if not exp_err is None: - assert_raises(exp_err, hdr.set_slope_inter, *in_tup) + with pytest.raises(exp_err): + hdr.set_slope_inter(*in_tup) # raw set if not in_tup[0] is None: hdr['scl_slope'] = in_tup[0] else: hdr.set_slope_inter(*in_tup) - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup # Check set survives through checking hdr = Spm99AnalyzeHeader.from_header(hdr, check=True) - assert_equal(hdr.get_slope_inter(), out_tup) + assert hdr.get_slope_inter() == out_tup assert_array_equal(hdr['scl_slope'], raw_slope) def test_origin_checks(self): @@ -140,14 +144,15 @@ def test_origin_checks(self): hdr.data_shape = [1, 1, 1] hdr['origin'][0] = 101 # severity 20 fhdr, message, raiser = self.log_chk(hdr, 20) - assert_equal(fhdr, hdr) - assert_equal(message, 'very large origin values ' + assert fhdr == hdr + assert (message == 'very large origin values ' 'relative to dims; leaving as set, ' 'ignoring for affine') - assert_raises(*raiser) + with pytest.raises(raiser[0]): + raiser[1](*raiser[2:]) # diagnose binary block dxer = self.header_class.diagnose_binaryblock - assert_equal(dxer(hdr.binaryblock), + assert (dxer(hdr.binaryblock) == 'very large origin values ' 'relative to dims') @@ -166,9 +171,9 @@ def assert_scale_me_scaling(self, hdr): # Assert that header `hdr` has "scale-me" scaling slope, inter = self._get_raw_scaling(hdr) if not slope is None: - assert_true(np.isnan(slope)) + assert np.isnan(slope) if not inter is None: - assert_true(np.isnan(inter)) 
+ assert np.isnan(inter) def _get_raw_scaling(self, hdr): return hdr['scl_slope'], None @@ -399,7 +404,7 @@ def test_nan2zero_range_ok(self): img.set_data_dtype(np.uint8) with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) - assert_equal(rt_img.get_fdata()[0, 0, 0], 0) + assert rt_img.get_fdata()[0, 0, 0] == 0 class TestSpm99AnalyzeImage(test_analyze.TestAnalyzeImage, ImageScalingMixin): @@ -460,7 +465,7 @@ def test_mat_read(self): from scipy.io import loadmat, savemat mat_fileobj.seek(0) mats = loadmat(mat_fileobj) - assert_true('M' in mats and 'mat' in mats) + assert 'M' in mats and 'mat' in mats from_111 = np.eye(4) from_111[:3, 3] = -1 to_111 = np.eye(4) @@ -471,7 +476,7 @@ def test_mat_read(self): # should have a flip. The 'mat' matrix does include flips and so # should be unaffected by the flipping. If both are present we prefer # the the 'mat' matrix. - assert_true(img.header.default_x_flip) # check the default + assert img.header.default_x_flip # check the default flipper = np.diag([-1, 1, 1, 1]) assert_array_equal(mats['M'], np.dot(aff, np.dot(flipper, from_111))) mat_fileobj.seek(0) @@ -513,7 +518,7 @@ def test_origin_affine(): assert_array_equal(aff, hdr.get_base_affine()) hdr.set_data_shape((3, 5, 7)) hdr.set_zooms((3, 2, 1)) - assert_true(hdr.default_x_flip) + assert hdr.default_x_flip assert_array_almost_equal( hdr.get_origin_affine(), # from center of image [[-3., 0., 0., 3.], From 393f630e342710a2bd5bf44910abffd37617692c Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 15:31:41 -0500 Subject: [PATCH 538/689] converting more tests nibabel.tests.test_t* --- nibabel/tests/test_testing.py | 103 ++++++++------- nibabel/tests/test_tmpdirs.py | 12 +- nibabel/tests/test_trackvis.py | 233 +++++++++++++++++---------------- 3 files changed, 183 insertions(+), 165 deletions(-) diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 171447560e..65880c2833 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -7,11 +7,10 @@ import numpy as np -from nose.tools import assert_equal, assert_true, assert_false, assert_raises -from ..testing import (error_warnings, suppress_warnings, +from ..testing_pytest import (error_warnings, suppress_warnings, clear_and_catch_warnings, assert_allclose_safely, get_fresh_mod, assert_re_in, test_data, data_path) -import pytest; pytestmark = pytest.mark.skip() +import pytest def test_assert_allclose_safely(): # Test the safe version of allclose @@ -19,7 +18,8 @@ def test_assert_allclose_safely(): assert_allclose_safely(1, 1) assert_allclose_safely(1, [1, 1]) assert_allclose_safely([1, 1], 1 + 1e-6) - assert_raises(AssertionError, assert_allclose_safely, [1, 1], 1 + 1e-4) + with pytest.raises(AssertionError): + assert_allclose_safely([1, 1], 1 + 1e-4) # Broadcastable matrices a = np.ones((2, 3)) b = np.ones((3, 2, 3)) @@ -27,24 +27,26 @@ def test_assert_allclose_safely(): a[0, 0] = 1 + eps assert_allclose_safely(a, b) a[0, 0] = 1 + 1.1e-5 - assert_raises(AssertionError, assert_allclose_safely, a, b) + with pytest.raises(AssertionError): + assert_allclose_safely(a, b) # Nans in same place a[0, 0] = np.nan b[:, 0, 0] = np.nan assert_allclose_safely(a, b) # Never equal with nans present, if not matching nans - assert_raises(AssertionError, - assert_allclose_safely, a, b, - match_nans=False) + with pytest.raises(AssertionError): + assert_allclose_safely(a, b, match_nans=False) b[0, 0, 0] = 1 - assert_raises(AssertionError, assert_allclose_safely, a, b) + with 
pytest.raises(AssertionError): + assert_allclose_safely(a, b) # Test allcloseness of inf, especially np.float128 infs for dtt in np.sctypes['float']: a = np.array([-np.inf, 1, np.inf], dtype=dtt) b = np.array([-np.inf, 1, np.inf], dtype=dtt) assert_allclose_safely(a, b) b[1] = 0 - assert_raises(AssertionError, assert_allclose_safely, a, b) + with pytest.raises(AssertionError): + assert_allclose_safely(a, b) # Empty compares equal to empty assert_allclose_safely([], []) @@ -56,19 +58,19 @@ def assert_warn_len_equal(mod, n_in_context): # warning generated by the tests within the context manager, but no # previous warnings. if 'version' in mod_warns: - assert_equal(len(mod_warns), 2) # including 'version' + assert len(mod_warns) == 2 # including 'version' else: - assert_equal(len(mod_warns), n_in_context) + assert len(mod_warns) == n_in_context def test_clear_and_catch_warnings(): # Initial state of module, no warnings my_mod = get_fresh_mod(__name__) - assert_equal(getattr(my_mod, '__warningregistry__', {}), {}) + assert getattr(my_mod, '__warningregistry__', {}) == {} with clear_and_catch_warnings(modules=[my_mod]): warnings.simplefilter('ignore') warnings.warn('Some warning') - assert_equal(my_mod.__warningregistry__, {}) + assert my_mod.__warningregistry__ == {} # Without specified modules, don't clear warnings during context with clear_and_catch_warnings(): warnings.warn('Some warning') @@ -94,23 +96,26 @@ def test_clear_and_catch_warnings_inherit(): with my_cacw(): warnings.simplefilter('ignore') warnings.warn('Some warning') - assert_equal(my_mod.__warningregistry__, {}) + assert my_mod.__warningregistry__ == {} def test_warn_error(): # Check warning error context manager n_warns = len(warnings.filters) with error_warnings(): - assert_raises(UserWarning, warnings.warn, 'A test') + with pytest.raises(UserWarning): + warnings.warn('A test') with error_warnings() as w: # w not used for anything - assert_raises(UserWarning, warnings.warn, 'A test') - assert_equal(n_warns, len(warnings.filters)) + with pytest.raises(UserWarning): + warnings.warn('A test') + assert n_warns == len(warnings.filters) # Check other errors are propagated def f(): with error_warnings(): raise ValueError('An error') - assert_raises(ValueError, f) + with pytest.raises(ValueError): + f() def test_warn_ignore(): @@ -122,54 +127,60 @@ def test_warn_ignore(): with suppress_warnings() as w: # w not used warnings.warn('Here is a warning, you will not see it') warnings.warn('Nor this one', DeprecationWarning) - assert_equal(n_warns, len(warnings.filters)) + assert n_warns == len(warnings.filters) # Check other errors are propagated def f(): with suppress_warnings(): raise ValueError('An error') - assert_raises(ValueError, f) - - -def test_assert_re_in(): - assert_re_in(".*", "") - assert_re_in(".*", ["any"]) - - # Should do match not search - assert_re_in("ab", "abc") - assert_raises(AssertionError, assert_re_in, "ab", "cab") - assert_raises(AssertionError, assert_re_in, "ab$", "abc") + with pytest.raises(ValueError): + f() +@pytest.mark.parametrize("args", [ + [".*", ""], + [".*", ["any"]], + ["ab", "abc"], # Sufficient to have one entry matching - assert_re_in("ab", ["", "abc", "laskdjf"]) - assert_raises(AssertionError, assert_re_in, "ab$", ["ddd", ""]) - + ["ab", ["", "abc", "laskdjf"]], # Tuples should be ok too - assert_re_in("ab", ("", "abc", "laskdjf")) - assert_raises(AssertionError, assert_re_in, "ab$", ("ddd", "")) + ["ab", ("", "abc", "laskdjf")] +]) +def test_assert_re_in(args): + assert_re_in(*args) - # 
Shouldn't "match" the empty list - assert_raises(AssertionError, assert_re_in, "", []) + +@pytest.mark.parametrize("args", [ + # Should do match not search + ["ab", "cab"], + ["ab$", "abc"], + ["ab$", ["ddd", ""]], + ["ab$", ("ddd", "")], + #Shouldn't "match" the empty list + ["", []] +]) +def test_assert_re_in_exception(args): + with pytest.raises(AssertionError): + assert_re_in(*args) def test_test_data(): - assert_equal(test_data(), data_path) - assert_equal(test_data(), + assert test_data() == data_path + assert (test_data() == os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'tests', 'data'))) for subdir in ('nicom', 'gifti', 'externals'): - assert_equal(test_data(subdir), os.path.join(data_path[:-10], subdir, 'tests', 'data')) - assert_true(os.path.exists(test_data(subdir))) - assert_false(os.path.exists(test_data(subdir, 'doesnotexist'))) + assert test_data(subdir) == os.path.join(data_path[:-10], subdir, 'tests', 'data') + assert os.path.exists(test_data(subdir)) + assert not os.path.exists(test_data(subdir, 'doesnotexist')) for subdir in ('freesurfer', 'doesnotexist'): - with assert_raises(ValueError): + with pytest.raises(ValueError): test_data(subdir) - assert_false(os.path.exists(test_data(None, 'doesnotexist'))) + assert not os.path.exists(test_data(None, 'doesnotexist')) for subdir, fname in [('gifti', 'ascii.gii'), ('nicom', '0.dcm'), ('externals', 'example_1.nc'), (None, 'empty.tck')]: - assert_true(os.path.exists(test_data(subdir, fname))) + assert os.path.exists(test_data(subdir, fname)) diff --git a/nibabel/tests/test_tmpdirs.py b/nibabel/tests/test_tmpdirs.py index 7914e273e1..c4d119b14f 100644 --- a/nibabel/tests/test_tmpdirs.py +++ b/nibabel/tests/test_tmpdirs.py @@ -5,8 +5,6 @@ from ..tmpdirs import InGivenDirectory -from nose.tools import assert_true, assert_equal -import pytest; pytestmark = pytest.mark.skip() MY_PATH = abspath(__file__) MY_DIR = dirname(MY_PATH) @@ -16,10 +14,10 @@ def test_given_directory(): # Test InGivenDirectory cwd = getcwd() with InGivenDirectory() as tmpdir: - assert_equal(tmpdir, abspath(cwd)) - assert_equal(tmpdir, abspath(getcwd())) + assert tmpdir == abspath(cwd) + assert tmpdir == abspath(getcwd()) with InGivenDirectory(MY_DIR) as tmpdir: - assert_equal(tmpdir, MY_DIR) - assert_equal(realpath(MY_DIR), realpath(abspath(getcwd()))) + assert tmpdir == MY_DIR + assert realpath(MY_DIR) == realpath(abspath(getcwd())) # We were deleting the Given directory! Check not so now. 
- assert_true(isfile(MY_PATH)) + assert isfile(MY_PATH) diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 676ee52d87..15675882eb 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -9,16 +9,14 @@ from ..orientations import aff2axcodes from ..volumeutils import native_code, swapped_code -from numpy.testing import assert_array_almost_equal -from ..testing import (assert_true, assert_false, assert_equal, assert_raises, assert_warns, - assert_array_equal, suppress_warnings) -import pytest; pytestmark = pytest.mark.skip() +from numpy.testing import assert_array_almost_equal, assert_array_equal +import pytest def test_write(): streams = [] out_f = BytesIO() tv.write(out_f, [], {}) - assert_equal(out_f.getvalue(), tv.empty_header().tostring()) + assert out_f.getvalue() == tv.empty_header().tostring() out_f.truncate(0) out_f.seek(0) # Write something not-default @@ -26,7 +24,7 @@ def test_write(): # read it back out_f.seek(0) streams, hdr = tv.read(out_f) - assert_equal(hdr['id_string'], b'TRACKb') + assert hdr['id_string'] == b'TRACKb' # check that we can pass none for the header out_f.truncate(0) out_f.seek(0) @@ -37,12 +35,12 @@ def test_write(): # check that we check input values out_f.truncate(0) out_f.seek(0) - assert_raises(tv.HeaderError, - tv.write, out_f, [], {'id_string': 'not OK'}) - assert_raises(tv.HeaderError, - tv.write, out_f, [], {'version': 3}) - assert_raises(tv.HeaderError, - tv.write, out_f, [], {'hdr_size': 0}) + with pytest.raises(tv.HeaderError): + tv.write(out_f, [], {'id_string': 'not OK'}) + with pytest.raises(tv.HeaderError): + tv.write(out_f, [], {'version': 3}) + with pytest.raises(tv.HeaderError): + tv.write(out_f, [], {'hdr_size': 0}) def test_write_scalars_props(): @@ -57,26 +55,31 @@ def test_write_scalars_props(): out_f = BytesIO() streams = [(points, None, None), (points, scalars, None)] - assert_raises(tv.DataError, tv.write, out_f, streams) + with pytest.raises(tv.DataError): + tv.write(out_f, streams) out_f.seek(0) streams = [(points, np.zeros((N, M + 1)), None), (points, scalars, None)] - assert_raises(tv.DataError, tv.write, out_f, streams) + with pytest.raises(tv.DataError): + tv.write(out_f, streams) # Or if scalars different N compared to points bad_scalars = np.zeros((N + 1, M)) out_f.seek(0) streams = [(points, bad_scalars, None), (points, bad_scalars, None)] - assert_raises(tv.DataError, tv.write, out_f, streams) + with pytest.raises(tv.DataError): + tv.write(out_f, streams) # Similarly properties must have the same length for each streamline out_f.seek(0) streams = [(points, scalars, None), (points, scalars, props)] - assert_raises(tv.DataError, tv.write, out_f, streams) + with pytest.raises(tv.DataError): + tv.write(out_f, streams) out_f.seek(0) streams = [(points, scalars, np.zeros((P + 1,))), (points, scalars, props)] - assert_raises(tv.DataError, tv.write, out_f, streams) + with pytest.raises(tv.DataError): + tv.write(out_f, streams) # If all is OK, then we get back what we put in out_f.seek(0) streams = [(points, scalars, props), @@ -134,7 +137,7 @@ def test_round_trip(): tv.write(out_f, streams, {}) out_f.seek(0) streams2, hdr = tv.read(out_f) - assert_true(streamlist_equal(streams, streams2)) + assert streamlist_equal(streams, streams2) # test that we can write in different endianness and get back same result, # for versions 1, 2 and not-specified for in_dict, back_version in (({}, 2), @@ -145,15 +148,15 @@ def test_round_trip(): tv.write(out_f, streams, in_dict, endian_code) 
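# The mechanical rewrite running through this file turns nose's call-style
#     assert_raises(tv.HeaderError, tv.write, out_f, [], {'version': 3})
# into the pytest context-manager form, which keeps the failing call
# readable as ordinary code and also hands back the raised exception for
# inspection. A minimal sketch with a stand-in parse_version helper:

import pytest


def parse_version(version):
    if version not in (1, 2):
        raise ValueError('unsupported version: %s' % version)
    return version


def test_parse_version_rejects_unknown():
    # pytest.raises returns an ExcInfo object, so the message can be
    # checked after the block -- something assert_raises could not do.
    with pytest.raises(ValueError) as excinfo:
        parse_version(3)
    assert 'unsupported' in str(excinfo.value)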
out_f.seek(0) streams2, hdr = tv.read(out_f) - assert_true(streamlist_equal(streams, streams2)) - assert_equal(hdr['version'], back_version) + assert streamlist_equal(streams, streams2) + assert hdr['version'] == back_version # test that we can get out and pass in generators out_f.seek(0) streams3, hdr = tv.read(out_f, as_generator=True) # check this is a generator rather than a list - assert_true(hasattr(streams3, 'send')) + assert hasattr(streams3, 'send') # but that it results in the same output - assert_true(streamlist_equal(streams, list(streams3))) + assert streamlist_equal(streams, list(streams3)) # write back in out_f.seek(0) streams3, hdr = tv.read(out_f, as_generator=True) @@ -164,7 +167,7 @@ def test_round_trip(): # and re-read just to check out_f_write.seek(0) streams2, hdr = tv.read(out_f_write) - assert_true(streamlist_equal(streams, streams2)) + assert streamlist_equal(streams, streams2) def test_points_processing(): @@ -192,11 +195,11 @@ def _rt(streams, hdr, points_space): # voxmm is the default. In this case we don't do anything to the # points, and we let the header pass through without further checks (raw_streams, hdr), (proc_streams, _) = _rt(vxmm_streams, {}, None) - assert_true(streamlist_equal(raw_streams, proc_streams)) - assert_true(streamlist_equal(vxmm_streams, proc_streams)) + assert streamlist_equal(raw_streams, proc_streams) + assert streamlist_equal(vxmm_streams, proc_streams) (raw_streams, hdr), (proc_streams, _) = _rt(vxmm_streams, {}, 'voxmm') - assert_true(streamlist_equal(raw_streams, proc_streams)) - assert_true(streamlist_equal(vxmm_streams, proc_streams)) + assert streamlist_equal(raw_streams, proc_streams) + assert streamlist_equal(vxmm_streams, proc_streams) # with 'voxels' as input, check for not all voxel_size == 0, warn if any # voxel_size == 0 for hdr in ( # these cause read / write errors @@ -207,21 +210,23 @@ def _rt(streams, hdr, points_space): ): # Check error on write out_f.seek(0) - assert_raises(tv.HeaderError, - tv.write, out_f, vx_streams, hdr, None, 'voxel') + with pytest.raises(tv.HeaderError): + tv.write(out_f, vx_streams, hdr, None, 'voxel') out_f.seek(0) # bypass write error and check read tv.write(out_f, vxmm_streams, hdr, None, points_space=None) out_f.seek(0) - assert_raises(tv.HeaderError, tv.read, out_f, False, 'voxel') + with pytest.raises(tv.HeaderError): + tv.read(out_f, False, 'voxel') # There's a warning for any voxel sizes == 0 hdr = {'voxel_size': [2, 3, 0]} - assert_warns(UserWarning, _rt, vx_streams, hdr, 'voxel') + with pytest.warns(UserWarning): + _rt(vx_streams, hdr, 'voxel') # This should be OK hdr = {'voxel_size': [2, 3, 4]} (raw_streams, hdr), (proc_streams, _) = _rt(vx_streams, hdr, 'voxel') - assert_true(streamlist_equal(vxmm_streams, raw_streams)) - assert_true(streamlist_equal(vx_streams, proc_streams)) + assert streamlist_equal(vxmm_streams, raw_streams) + assert streamlist_equal(vx_streams, proc_streams) # Now we try with rasmm points. In this case we need valid voxel_size, # and voxel_order, and vox_to_ras. 
The voxel_order has to match the # vox_to_ras, and so do the voxel sizes @@ -247,19 +252,20 @@ def _rt(streams, hdr, points_space): ): # Check error on write out_f.seek(0) - assert_raises(tv.HeaderError, - tv.write, out_f, rasmm_streams, hdr, None, 'rasmm') + with pytest.raises(tv.HeaderError): + tv.write(out_f, rasmm_streams, hdr, None, 'rasmm') out_f.seek(0) # bypass write error and check read tv.write(out_f, vxmm_streams, hdr, None, points_space=None) out_f.seek(0) - assert_raises(tv.HeaderError, tv.read, out_f, False, 'rasmm') + with pytest.raises(tv.HeaderError): + tv.read(out_f, False, 'rasmm') # This should be OK hdr = {'voxel_size': [2, 3, 4], 'voxel_order': 'RAS', 'vox_to_ras': aff} (raw_streams, hdr), (proc_streams, _) = _rt(rasmm_streams, hdr, 'rasmm') - assert_true(streamlist_equal(vxmm_streams, raw_streams)) - assert_true(streamlist_equal(rasmm_streams, proc_streams)) + assert streamlist_equal(vxmm_streams, raw_streams) + assert streamlist_equal(rasmm_streams, proc_streams) # More complex test to check matrix orientation fancy_affine = np.array([[0., -2, 0, 10], [3, 0, 0, 20], @@ -278,73 +284,73 @@ def f(pts): # from vx to mm (ijk1 * [[3, 2, 4]], scalars[1], None)] (raw_streams, hdr), (proc_streams, _) = _rt( fancy_rasmm_streams, hdr, 'rasmm') - assert_true(streamlist_equal(fancy_vxmm_streams, raw_streams)) - assert_true(streamlist_equal(fancy_rasmm_streams, proc_streams)) + assert streamlist_equal(fancy_vxmm_streams, raw_streams) + assert streamlist_equal(fancy_rasmm_streams, proc_streams) def test__check_hdr_points_space(): # Test checking routine for points_space input given header # None or voxmm -> no checks, pass through - assert_equal(tv._check_hdr_points_space({}, None), None) - assert_equal(tv._check_hdr_points_space({}, 'voxmm'), None) + assert tv._check_hdr_points_space({}, None) == None + assert tv._check_hdr_points_space({}, 'voxmm') == None # strange value for points_space -> ValueError - assert_raises(ValueError, - tv._check_hdr_points_space, {}, 'crazy') + with pytest.raises(ValueError): + tv._check_hdr_points_space({}, 'crazy') # Input not in (None, 'voxmm', 'voxels', 'rasmm') - error # voxels means check voxel sizes present and not all 0. 
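# assert_warns is converted the same way: pytest.warns mirrors
# pytest.raises, fails the test if no matching warning is emitted, and can
# additionally match the warning message. A minimal sketch with a stand-in
# check_voxel_sizes helper (not the nibabel routine):

import warnings

import pytest


def check_voxel_sizes(voxel_sizes):
    if any(v == 0 for v in voxel_sizes):
        warnings.warn('voxel size of 0 found', UserWarning)


def test_zero_voxel_size_warns():
    # match= is a regex searched against the message text.
    with pytest.warns(UserWarning, match='voxel size'):
        check_voxel_sizes([2, 3, 0])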
hdr = tv.empty_header() assert_array_equal(hdr['voxel_size'], [0, 0, 0]) - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'voxel') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'voxel') # Negative voxel size gives error - because it is not what trackvis does, # and this not what we mean by 'voxmm' hdr['voxel_size'] = [-2, 3, 4] - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'voxel') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'voxel') # Warning here only hdr['voxel_size'] = [2, 3, 0] - assert_warns(UserWarning, - tv._check_hdr_points_space, hdr, 'voxel') + with pytest.warns(UserWarning): + tv._check_hdr_points_space(hdr, 'voxel') # This is OK hdr['voxel_size'] = [2, 3, 4] - assert_equal(tv._check_hdr_points_space(hdr, 'voxel'), None) + assert tv._check_hdr_points_space(hdr, 'voxel') == None # rasmm - check there is an affine, that it matches voxel_size and # voxel_order # no affine hdr['voxel_size'] = [2, 3, 4] - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # still no affine hdr['voxel_order'] = 'RAS' - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # nearly an affine, but 0 at position 3,3 - means not recorded in trackvis # standard hdr['vox_to_ras'] = np.diag([2, 3, 4, 0]) - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # This affine doesn't match RAS voxel order hdr['vox_to_ras'] = np.diag([-2, 3, 4, 1]) - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # This affine doesn't match the voxel size hdr['vox_to_ras'] = np.diag([3, 3, 4, 1]) - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # This should be OK good_aff = np.diag([2, 3, 4, 1]) hdr['vox_to_ras'] = good_aff - assert_equal(tv._check_hdr_points_space(hdr, 'rasmm'), + assert (tv._check_hdr_points_space(hdr, 'rasmm') == None) # Default voxel order of LPS assumed hdr['voxel_order'] = '' # now the RAS affine raises an error - assert_raises(tv.HeaderError, - tv._check_hdr_points_space, hdr, 'rasmm') + with pytest.raises(tv.HeaderError): + tv._check_hdr_points_space(hdr, 'rasmm') # this affine does have LPS voxel order good_lps = np.dot(np.diag([-1, -1, 1, 1]), good_aff) hdr['vox_to_ras'] = good_lps - assert_equal(tv._check_hdr_points_space(hdr, 'rasmm'), + assert (tv._check_hdr_points_space(hdr, 'rasmm') == None) @@ -352,16 +358,16 @@ def test_empty_header(): for endian in '<>': for version in (1, 2): hdr = tv.empty_header(endian, version) - assert_equal(hdr['id_string'], b'TRACK') - assert_equal(hdr['version'], version) - assert_equal(hdr['hdr_size'], 1000) + assert hdr['id_string'] == b'TRACK' + assert hdr['version'] == version + assert hdr['hdr_size'] == 1000 assert_array_equal( hdr['image_orientation_patient'], [0, 0, 0, 0, 0, 0]) hdr = tv.empty_header(version=2) assert_array_equal(hdr['vox_to_ras'], np.zeros((4, 4))) hdr_endian = tv.endian_codes[tv.empty_header().dtype.byteorder] - assert_equal(hdr_endian, tv.native_code) + assert hdr_endian == tv.native_code def test_get_affine(): @@ -391,11 +397,12 @@ def 
test_get_affine(): exp_aff) # check against voxel order. This one works hdr['voxel_order'] = ''.join(aff2axcodes(exp_aff)) - assert_equal(hdr['voxel_order'], b'RAS') + assert hdr['voxel_order'] == b'RAS' assert_array_equal(old_afh(hdr), exp_aff) # This one doesn't hdr['voxel_order'] = 'LAS' - assert_raises(tv.HeaderError, old_afh, hdr) + with pytest.raises(tv.HeaderError): + old_afh(hdr) # This one does work because the routine allows the final dimension to # be flipped to try and match the voxel order hdr['voxel_order'] = 'RAI' @@ -411,11 +418,12 @@ def test_get_affine(): tv.aff_to_hdr(in_aff, hdr, pos_vox=True, set_order=True) # Unset easier option hdr['vox_to_ras'] = 0 - assert_equal(hdr['voxel_order'], o_codes) + assert hdr['voxel_order'] == o_codes # Check it came back the way we wanted assert_array_equal(old_afh(hdr), in_aff) # Check that v1 header raises error - assert_raises(tv.HeaderError, tv.aff_from_hdr, hdr) + with pytest.raises(tv.HeaderError): + tv.aff_from_hdr(hdr) # now use the easier vox_to_ras field hdr = tv.empty_header() aff = np.eye(4) @@ -453,28 +461,28 @@ def test_aff_to_hdr(): for hdr in ({}, {'version': 2}, {'version': 1}): tv.aff_to_hdr(aff2, hdr, pos_vox=True, set_order=False) assert_array_equal(hdr['voxel_size'], [1, 2, 3]) - assert_false('voxel_order' in hdr) + assert not 'voxel_order' in hdr tv.aff_to_hdr(aff2, hdr, pos_vox=False, set_order=True) assert_array_equal(hdr['voxel_size'], [-1, 2, 3]) - assert_equal(hdr['voxel_order'], 'RAI') + assert hdr['voxel_order'] == 'RAI' tv.aff_to_hdr(aff2, hdr, pos_vox=True, set_order=True) assert_array_equal(hdr['voxel_size'], [1, 2, 3]) - assert_equal(hdr['voxel_order'], 'RAI') + assert hdr['voxel_order'] == 'RAI' if 'version' in hdr and hdr['version'] == 1: - assert_false('vox_to_ras' in hdr) + assert not 'vox_to_ras' in hdr else: assert_array_equal(hdr['vox_to_ras'], aff2) def test_tv_class(): tvf = tv.TrackvisFile([]) - assert_equal(tvf.streamlines, []) - assert_true(isinstance(tvf.header, np.ndarray)) - assert_equal(tvf.endianness, tv.native_code) - assert_equal(tvf.filename, None) + assert tvf.streamlines == [] + assert isinstance(tvf.header, np.ndarray) + assert tvf.endianness == tv.native_code + assert tvf.filename == None out_f = BytesIO() tvf.to_file(out_f) - assert_equal(out_f.getvalue(), tv.empty_header().tostring()) + assert out_f.getvalue() == tv.empty_header().tostring() out_f.truncate(0) out_f.seek(0) # Write something not-default @@ -483,19 +491,16 @@ def test_tv_class(): # read it back out_f.seek(0) tvf_back = tv.TrackvisFile.from_file(out_f) - assert_equal(tvf_back.header['id_string'], b'TRACKb') + assert tvf_back.header['id_string'] == b'TRACKb' # check that we check input values out_f.truncate(0) out_f.seek(0) - assert_raises(tv.HeaderError, - tv.TrackvisFile, - [], {'id_string': 'not OK'}) - assert_raises(tv.HeaderError, - tv.TrackvisFile, - [], {'version': 3}) - assert_raises(tv.HeaderError, - tv.TrackvisFile, - [], {'hdr_size': 0}) + with pytest.raises(tv.HeaderError): + tv.TrackvisFile([], {'id_string': 'not OK'}) + with pytest.raises(tv.HeaderError): + tv.TrackvisFile([], {'version': 3}) + with pytest.raises(tv.HeaderError): + tv.TrackvisFile([], {'hdr_size': 0}) affine = np.diag([1, 2, 3, 1]) affine[:3, 3] = [10, 11, 12] # affine methods will raise same warnings and errors as function @@ -503,9 +508,8 @@ def test_tv_class(): aff = tvf.get_affine(atleast_v2=True) assert_array_almost_equal(aff, affine) # Test that we raise an error with an iterator - assert_raises(tv.TrackvisFileError, - 
tv.TrackvisFile, - iter([])) + with pytest.raises(tv.TrackvisFileError): + tv.TrackvisFile(iter([])) def test_tvfile_io(): @@ -521,22 +525,23 @@ def test_tvfile_io(): tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f) - assert_equal(tvf2.filename, None) - assert_true(streamlist_equal(vxmm_streams, tvf2.streamlines)) - assert_equal(tvf2.points_space, None) + assert tvf2.filename == None + assert streamlist_equal(vxmm_streams, tvf2.streamlines) + assert tvf2.points_space == None # Voxel points_space tvf = tv.TrackvisFile(vx_streams, points_space='voxel') out_f.seek(0) # No voxel size - error - assert_raises(tv.HeaderError, tvf.to_file, out_f) + with pytest.raises(tv.HeaderError): + tvf.to_file(out_f) out_f.seek(0) # With voxel size, no error, roundtrip works tvf.header['voxel_size'] = [2, 3, 4] tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f, points_space='voxel') - assert_true(streamlist_equal(vx_streams, tvf2.streamlines)) - assert_equal(tvf2.points_space, 'voxel') + assert streamlist_equal(vx_streams, tvf2.streamlines) + assert tvf2.points_space == 'voxel' out_f.seek(0) # Also with affine specified tvf = tv.TrackvisFile(vx_streams, points_space='voxel', @@ -544,7 +549,7 @@ def test_tvfile_io(): tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f, points_space='voxel') - assert_true(streamlist_equal(vx_streams, tvf2.streamlines)) + assert streamlist_equal(vx_streams, tvf2.streamlines) # Fancy affine test fancy_affine = np.array([[0., -2, 0, 10], [3, 0, 0, 20], @@ -560,15 +565,16 @@ def f(pts): # from vx to mm tvf = tv.TrackvisFile(fancy_rasmm_streams, points_space='rasmm') out_f.seek(0) # No affine - assert_raises(tv.HeaderError, tvf.to_file, out_f) + with pytest.raises(tv.HeaderError): + tvf.to_file(out_f) out_f.seek(0) # With affine set, no error, roundtrip works tvf.set_affine(fancy_affine, pos_vox=True, set_order=True) tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f, points_space='rasmm') - assert_true(streamlist_equal(fancy_rasmm_streams, tvf2.streamlines)) - assert_equal(tvf2.points_space, 'rasmm') + assert streamlist_equal(fancy_rasmm_streams, tvf2.streamlines) + assert tvf2.points_space == 'rasmm' out_f.seek(0) # Also when affine given in init tvf = tv.TrackvisFile(fancy_rasmm_streams, points_space='rasmm', @@ -576,7 +582,7 @@ def f(pts): # from vx to mm tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f, points_space='rasmm') - assert_true(streamlist_equal(fancy_rasmm_streams, tvf2.streamlines)) + assert streamlist_equal(fancy_rasmm_streams, tvf2.streamlines) def test_read_truncated(): @@ -590,26 +596,29 @@ def test_read_truncated(): value = out_f.getvalue()[:-(3 * 4)] new_f = BytesIO(value) # By default, raises a DataError - assert_raises(tv.DataError, tv.read, new_f) + with pytest.raises(tv.DataError): + tv.read(new_f) # This corresponds to strict mode new_f.seek(0) - assert_raises(tv.DataError, tv.read, new_f, strict=True) + with pytest.raises(tv.DataError): + tv.read(new_f, strict=True) # lenient error mode lets this error pass, with truncated track short_streams = [(xyz0, None, None), (xyz1[:-1], None, None)] new_f.seek(0) streams2, hdr = tv.read(new_f, strict=False) - assert_true(streamlist_equal(streams2, short_streams)) + assert streamlist_equal(streams2, short_streams) # Check that lenient works when number of tracks is 0, where 0 signals to # the reader to read until the end of the file. 
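# The other recurring change is dropping assert_equal / assert_true for
# plain assert statements: pytest rewrites assert on import, so a bare
# comparison still reports the compared values on failure. Note the
# `is None` identity checks -- the code-review suggestions applied in later
# patches of this series make the same fix. A minimal sketch with a
# hypothetical FakeHeader:


class FakeHeader:
    version = 2
    filename = None


def test_fake_header_defaults():
    hdr = FakeHeader()
    # On failure pytest reports the values, e.g. "assert 1 == 2".
    assert hdr.version == 2
    # Identity, not equality, when comparing against None.
    assert hdr.filename is None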
again_hdr = hdr.copy() - assert_equal(again_hdr['n_count'], 2) + assert again_hdr['n_count'] == 2 again_hdr['n_count'] = 0 again_bytes = again_hdr.tostring() + value[again_hdr.itemsize:] again_f = BytesIO(again_bytes) streams2, _ = tv.read(again_f, strict=False) - assert_true(streamlist_equal(streams2, short_streams)) + assert streamlist_equal(streams2, short_streams) # Set count to one above actual number of tracks, always raise error again_hdr['n_count'] = 3 again_bytes = again_hdr.tostring() + value[again_hdr.itemsize:] again_f = BytesIO(again_bytes) - assert_raises(tv.DataError, tv.read, again_f, strict=False) + with pytest.raises(tv.DataError): + tv.read(again_f, strict=False) From 7aaa2d08a17431afd4fbc8750d5e6e4eec288642 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 15:36:11 -0500 Subject: [PATCH 539/689] converting test_volumeutils --- nibabel/tests/test_volumeutils.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 711894aabc..1591dae005 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -20,13 +20,11 @@ import bz2 import threading import time -import pytest import numpy as np from ..tmpdirs import InTemporaryDirectory from ..openers import ImageOpener -from .. import volumeutils from ..volumeutils import (array_from_file, _is_compressed_fobj, array_to_file, @@ -57,11 +55,10 @@ from numpy.testing import (assert_array_almost_equal, assert_array_equal) -from nose.tools import assert_raises +import pytest from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, suppress_warnings, clear_and_catch_warnings) -import pytest; pytestmark = pytest.mark.skip() #: convenience variables for numpy types FLOAT_TYPES = np.sctypes['float'] @@ -73,11 +70,11 @@ def test_deprecated_functions(): - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): scale_min_max(0, 1, np.uint8, True) - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): calculate_scale(np.array([-2, -1], dtype=np.int8), np.uint8, True) - with assert_raises(ExpiredDeprecationError): + with pytest.raises(ExpiredDeprecationError): can_cast(np.float32, np.float32) From 4ee885592725022f0d2a434bf9aa38583fdddf87 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 15:56:41 -0500 Subject: [PATCH 540/689] converting nibabel_data --- nibabel/tests/nibabel_data.py | 10 +++++----- nibabel/tests/test_optpkg.py | 4 +--- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 453c0af9ec..0843293d10 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -4,7 +4,7 @@ from os import environ, listdir from os.path import dirname, realpath, join as pjoin, isdir, exists -from ..testing import skipif +import pytest def get_nibabel_data(): @@ -39,11 +39,11 @@ def needs_nibabel_data(subdir=None): """ nibabel_data = get_nibabel_data() if nibabel_data == '': - return skipif(True, "Need nibabel-data directory for this test") + return pytest.mark.skipif(True, reason="Need nibabel-data directory for this test") if subdir is None: - return skipif(False) + return pytest.mark.skipif(False, reason="todo") required_path = pjoin(nibabel_data, subdir) # Path should not be empty (as is the case for not-updated submodules) have_files = exists(required_path) and len(listdir(required_path)) > 0 - return 
skipif(not have_files, - "Need files in {0} for these tests".format(required_path)) + return pytest.mark.skipif(not have_files, + reason="Need files in {0} for these tests".format(required_path)) diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index a6b9571530..387cebecba 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -7,8 +7,7 @@ import builtins from distutils.version import LooseVersion -# TODO: remove (have to be coordinated with optpkg) -from nose import SkipTest +from unittest import SkipTest import pytest from nibabel.optpkg import optional_package @@ -28,7 +27,6 @@ def assert_bad(pkg_name, min_version=None): assert isinstance(pkg, TripWire) with pytest.raises(TripWireError): getattr(pkg, 'a_method') - # TODO: remove with pytest.raises(SkipTest): setup() From a1704970fff7d40f9846ebfc608d9476fc1c6c65 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 541/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 0843293d10..ad4ac98a16 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From ff3cb80b6bc94b78b310d401ed471de49814f2e1 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 21:51:26 -0500 Subject: [PATCH 542/689] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nibabel/tests/nibabel_data.py | 4 ++-- nibabel/tests/test_spaces.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index ad4ac98a16..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -41,9 +41,9 @@ def needs_nibabel_data(subdir=None): if nibabel_data == '': return pytest.mark.skipif(True, reason="Need nibabel-data directory for this test") if subdir is None: - return pytest.mark.skipif(False, reason="todo") + return pytest.mark.skipif(False, reason="Don't skip") required_path = pjoin(nibabel_data, subdir) # Path should not be empty (as is the case for not-updated submodules) have_files = exists(required_path) and len(listdir(required_path)) > 0 return pytest.mark.skipif(not have_files, - reason="Need files in {0} for these tests".format(required_path)) + reason="Need files in {0} for these tests".format(required_path)) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 5459571954..0b301dae61 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -116,11 +116,11 @@ def test_slice2volume(): assert (slice2volume(val, axis) == exp_aff).all() -@pytest.mark.parametrize("args", [ 
+@pytest.mark.parametrize("index, axis", [ [-1, 0], [0, -1], [0, 3] ]) def test_slice2volume_exception(args): with pytest.raises(ValueError): - slice2volume(*args) \ No newline at end of file + slice2volume(*args) From e87d3a5c48038671cd5a0fe3ee79857052ec72d4 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:03:27 -0500 Subject: [PATCH 543/689] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nibabel/tests/test_spatialimages.py | 2 +- nibabel/tests/test_trackvis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 288096638e..4e3547f1d9 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -162,7 +162,7 @@ def test_affine(): [0, 2, 0, -1], [0, 0, 1, -1], [0, 0, 0, 1]]) - assert (hdr.get_base_affine() == hdr.get_best_affine()).all() + assert np.array_equal(hdr.get_base_affine(), hdr.get_best_affine()) def test_read_data(): diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 15675882eb..bcfbfe673d 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -525,9 +525,9 @@ def test_tvfile_io(): tvf.to_file(out_f) out_f.seek(0) tvf2 = tv.TrackvisFile.from_file(out_f) - assert tvf2.filename == None + assert tvf2.filename is None assert streamlist_equal(vxmm_streams, tvf2.streamlines) - assert tvf2.points_space == None + assert tvf2.points_space is None # Voxel points_space tvf = tv.TrackvisFile(vx_streams, points_space='voxel') out_f.seek(0) From f2931abc6e0c1b087583950b6a224fa51da02237 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 544/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From af8b38696891c8c9f1faba658a04e5fdef2d49f4 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 545/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 06210d57062729be7c57d651a7e435e30f3f0403 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 546/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def 
get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From dd42dd8aa3645318f7a91fa8f594abed11429d4a Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 547/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 23 +++++++++++------------ nibabel/tests/test_spatialimages.py | 16 ++++++---------- 2 files changed, 17 insertions(+), 22 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 0b301dae61..acdd61a28e 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) @@ -101,7 +93,14 @@ def test_vox2out_vox(arg_tuple): assert isinstance(shape[0], int) # Enforce number of axes with pytest.raises(ValueError): - vox2out_vox(arg_tuple) + vox2out_vox(((2, 3, 4, 5), np.eye(4))) + with pytest.raises(ValueError): + vox2out_vox(((2, 3, 4, 5, 6), np.eye(4))) + # Voxel sizes must be positive + with pytest.raises(ValueError): + vox2out_vox(((2, 3, 4), np.eye(4), [-1, 1, 1])) + with pytest.raises(ValueError): + vox2out_vox(((2, 3, 4), np.eye(4), [1, 0, 1])) def test_slice2volume(): @@ -121,6 +120,6 @@ def test_slice2volume(): [0, -1], [0, 3] ]) -def test_slice2volume_exception(args): +def test_slice2volume_exception(index, axis): with pytest.raises(ValueError): - slice2volume(*args) + slice2volume(index, axis) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 4e3547f1d9..5959057fc9 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -173,18 +173,15 @@ class CHeader(SpatialHeader): fobj = BytesIO() data = np.arange(6).reshape((1, 2, 3)) hdr.data_to_fileobj(data, fobj) - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) + assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) # data_to_fileobj accepts kwarg 'rescale', but no effect in this case fobj.seek(0) hdr.data_to_fileobj(data, fobj, rescale=True) - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) + assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) # data_to_fileobj can be a list fobj.seek(0) hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) + assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) # Read data back again fobj.seek(0) data2 = 
hdr.data_from_fileobj(fobj) @@ -464,8 +461,8 @@ def test_slicer(self): img.slicer[:, :, :, None] else: # Reorder non-spatial axes - assert (img.slicer[:, :, :, None].shape == - img.shape[:3] + (1,) + img.shape[3:]) + assert (img.slicer[:, :, :, None].shape + == img.shape[:3] + (1,) + img.shape[3:]) # 4D to 3D using ellipsis or slices assert img.slicer[..., 0].shape == img.shape[:-1] assert img.slicer[:, :, :, 0].shape == img.shape[:-1] @@ -480,8 +477,7 @@ def test_slicer(self): with pytest.raises(ValueError): img.slicer[:, :, :, :, None] else: - assert (img.slicer[:, :, :, :, None].shape == - img.shape + (1,)) + assert img.slicer[:, :, :, :, None].shape == img.shape + (1,) # Crop by one voxel in each dimension sliced_i = img.slicer[1:] From e55940da85f795fb665b6b542ce1919607fcba1e Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:45:47 -0500 Subject: [PATCH 548/689] ignoring tests with pytest.mark.parametrize --- .azure-pipelines/windows.yml | 3 +++ .travis.yml | 3 +++ 2 files changed, 6 insertions(+) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 89b47b8345..f63b5f48c3 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -99,6 +99,9 @@ jobs: -I test_round_trip ^ -I test_rstutils ^ -I test_scaling ^ + -I test_scripts ^ + -I test_spaces ^ + -I test_testing ^ -I test_wrapstruct displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) diff --git a/.travis.yml b/.travis.yml index b869d33947..e8b20252f0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -186,6 +186,9 @@ script: -I test_round_trip \ -I test_rstutils \ -I test_scaling \ + -I test_scripts \ + -I test_spaces \ + -I test_testing \ -I test_wrapstruct elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation From 1e2caf4c89d7b2bfdb40b12f57e29e81f9ee8f94 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 549/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From b1fe9214b916675b914dfc284669db84c52b59c9 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 550/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 31c16985f4e1fe303bb42d46c80ac7bf9440891a Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 551/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 12 ++++++++++-- 
nibabel/tests/test_spatialimages.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index acdd61a28e..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) @@ -122,4 +130,4 @@ def test_slice2volume(): ]) def test_slice2volume_exception(index, axis): with pytest.raises(ValueError): - slice2volume(index, axis) + slice2volume(index, axis) \ No newline at end of file diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 5959057fc9..d24633d283 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -162,7 +162,11 @@ def test_affine(): [0, 2, 0, -1], [0, 0, 1, -1], [0, 0, 0, 1]]) +<<<<<<< HEAD assert np.array_equal(hdr.get_base_affine(), hdr.get_best_affine()) +======= + assert (hdr.get_base_affine() == hdr.get_best_affine()).all() +>>>>>>> converting more tests nibabel.tests.test_s* def test_read_data(): @@ -173,6 +177,7 @@ class CHeader(SpatialHeader): fobj = BytesIO() data = np.arange(6).reshape((1, 2, 3)) hdr.data_to_fileobj(data, fobj) +<<<<<<< HEAD assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) # data_to_fileobj accepts kwarg 'rescale', but no effect in this case fobj.seek(0) @@ -182,6 +187,20 @@ class CHeader(SpatialHeader): fobj.seek(0) hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) +======= + assert (fobj.getvalue() == + data.astype(np.int32).tostring(order=order)) + # data_to_fileobj accepts kwarg 'rescale', but no effect in this case + fobj.seek(0) + hdr.data_to_fileobj(data, fobj, rescale=True) + assert (fobj.getvalue() == + data.astype(np.int32).tostring(order=order)) + # data_to_fileobj can be a list + fobj.seek(0) + hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) + assert (fobj.getvalue() == + data.astype(np.int32).tostring(order=order)) +>>>>>>> converting more tests nibabel.tests.test_s* # Read data back again fobj.seek(0) data2 = hdr.data_from_fileobj(fobj) @@ -461,8 +480,13 @@ def test_slicer(self): img.slicer[:, :, :, None] else: # Reorder non-spatial axes +<<<<<<< HEAD assert (img.slicer[:, :, :, None].shape == img.shape[:3] + (1,) + img.shape[3:]) +======= + assert (img.slicer[:, :, :, None].shape == + img.shape[:3] + (1,) + img.shape[3:]) +>>>>>>> converting more tests nibabel.tests.test_s* # 4D to 3D using ellipsis or slices assert img.slicer[..., 0].shape == img.shape[:-1] assert img.slicer[:, :, :, 0].shape == img.shape[:-1] @@ -477,7 +501,12 @@ def test_slicer(self): with pytest.raises(ValueError): img.slicer[:, :, :, :, None] else: +<<<<<<< HEAD assert img.slicer[:, :, :, :, None].shape == img.shape + (1,) +======= + assert (img.slicer[:, :, :, :, None].shape == + img.shape + (1,)) +>>>>>>> converting more tests nibabel.tests.test_s* # Crop by one voxel in each dimension sliced_i = img.slicer[1:] From e099c290d272d5b01b9b4a2c7a4dbb2ed1a1ad55 Mon Sep 17 00:00:00 2001 From: 
Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 552/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 264d1fa93b30c94d8ab4d395d6e27defb58fa567 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:03:27 -0500 Subject: [PATCH 553/689] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nibabel/tests/test_spatialimages.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index d24633d283..5959057fc9 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -162,11 +162,7 @@ def test_affine(): [0, 2, 0, -1], [0, 0, 1, -1], [0, 0, 0, 1]]) -<<<<<<< HEAD assert np.array_equal(hdr.get_base_affine(), hdr.get_best_affine()) -======= - assert (hdr.get_base_affine() == hdr.get_best_affine()).all() ->>>>>>> converting more tests nibabel.tests.test_s* def test_read_data(): @@ -177,7 +173,6 @@ class CHeader(SpatialHeader): fobj = BytesIO() data = np.arange(6).reshape((1, 2, 3)) hdr.data_to_fileobj(data, fobj) -<<<<<<< HEAD assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) # data_to_fileobj accepts kwarg 'rescale', but no effect in this case fobj.seek(0) @@ -187,20 +182,6 @@ class CHeader(SpatialHeader): fobj.seek(0) hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) assert fobj.getvalue() == data.astype(np.int32).tostring(order=order) -======= - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) - # data_to_fileobj accepts kwarg 'rescale', but no effect in this case - fobj.seek(0) - hdr.data_to_fileobj(data, fobj, rescale=True) - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) - # data_to_fileobj can be a list - fobj.seek(0) - hdr.data_to_fileobj(data.tolist(), fobj, rescale=True) - assert (fobj.getvalue() == - data.astype(np.int32).tostring(order=order)) ->>>>>>> converting more tests nibabel.tests.test_s* # Read data back again fobj.seek(0) data2 = hdr.data_from_fileobj(fobj) @@ -480,13 +461,8 @@ def test_slicer(self): img.slicer[:, :, :, None] else: # Reorder non-spatial axes -<<<<<<< HEAD assert (img.slicer[:, :, :, None].shape == img.shape[:3] + (1,) + img.shape[3:]) -======= - assert (img.slicer[:, :, :, None].shape == - img.shape[:3] + (1,) + img.shape[3:]) ->>>>>>> converting more tests nibabel.tests.test_s* # 4D to 3D using ellipsis or slices assert img.slicer[..., 0].shape == img.shape[:-1] assert img.slicer[:, :, :, 0].shape == 
img.shape[:-1] @@ -501,12 +477,7 @@ def test_slicer(self): with pytest.raises(ValueError): img.slicer[:, :, :, :, None] else: -<<<<<<< HEAD assert img.slicer[:, :, :, :, None].shape == img.shape + (1,) -======= - assert (img.slicer[:, :, :, :, None].shape == - img.shape + (1,)) ->>>>>>> converting more tests nibabel.tests.test_s* # Crop by one voxel in each dimension sliced_i = img.slicer[1:] From dd0c52195fec1f05a456e49ac9f24a399d2ed504 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 554/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 159956e78333334ab7e7fb6ab1d4fb383b29a858 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 555/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From e0da084b7f3e2cda539dcb00b3d6ec7128caae76 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 556/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 0519f3616fb49e26cbd4b18a552be790f7007867 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 557/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def 
get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From f4f48ef88fd56676fbfe8f0683957eecf11b9503 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 12:35:53 -0500 Subject: [PATCH 558/689] applying suggestions from 865 --- nibabel/tests/test_arraywriters.py | 8 ++++---- nibabel/tests/test_deprecator.py | 15 +++------------ nibabel/tests/test_fileslice.py | 2 +- nibabel/tests/test_funcs.py | 6 +++--- nibabel/tests/test_image_api.py | 14 ++++---------- nibabel/tests/test_orientations.py | 5 +---- nibabel/tests/test_parrec.py | 16 +++++++--------- nibabel/tests/test_parrec_data.py | 3 +-- nibabel/tests/test_recoder.py | 18 +++++++++--------- nibabel/tests/test_scaling.py | 22 ++++++++++------------ nibabel/tests/test_spatialimages.py | 23 +++++++++-------------- nibabel/tests/test_spm99analyze.py | 3 +-- 12 files changed, 53 insertions(+), 82 deletions(-) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 8f30f04321..1a1c4eb156 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -508,9 +508,9 @@ def test_nan2zero(): assert_array_equal(np.isnan(data_back), [True, False]) # Deprecation warning for nan2zero as argument to `to_fileobj` with error_warnings(): - with pytest.raises(DeprecationWarning): + with pytest.deprecated_call(): aw.to_fileobj(BytesIO(), 'F', True) - with pytest.raises(DeprecationWarning): + with pytest.deprecated_call(): aw.to_fileobj(BytesIO(), 'F', nan2zero=True) # Error if nan2zero is not the value set at initialization with pytest.raises(WriterError): @@ -530,9 +530,9 @@ def test_nan2zero(): assert_array_equal(data_back, [astype_res, 99]) # Deprecation warning for nan2zero as argument to `to_fileobj` with error_warnings(): - with pytest.raises(DeprecationWarning): + with pytest.deprecated_call(): aw.to_fileobj(BytesIO(), 'F', False) - with pytest.raises(DeprecationWarning): + with pytest.deprecated_call(): aw.to_fileobj(BytesIO(), 'F', nan2zero=False) # Error if nan2zero is not the value set at initialization with pytest.raises(WriterError): diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index 14255da4b4..eb9de3799d 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -69,11 +69,8 @@ def test_dep_func(self): # Test function deprecation dec = self.dep_func func = dec('foo')(func_no_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call(): assert func() == None - assert len(w) == 1 - assert w[0].category is DeprecationWarning assert func.__doc__ == 'foo\n' func = dec('foo')(func_doc) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: @@ -135,11 +132,8 @@ def test_dep_func(self): assert w[0].category is UserWarning func = dec('foo', error_class=CustomError)(func_no_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call(): assert func() == None - assert len(w) == 1 - assert w[0].category is DeprecationWarning func = dec('foo', until='1.8', 
error_class=CustomError)(func_no_doc) with pytest.raises(CustomError): @@ -162,11 +156,8 @@ def test_deprecator_maker(self): dec = self.dep_maker(error_class=CustomError) func = dec('foo')(func_no_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call(): assert func() == None - assert len(w) == 1 - assert w[0].category is DeprecationWarning func = dec('foo', until='1.8')(func_no_doc) with pytest.raises(CustomError): diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index 5c80ae01b5..924da5fc9f 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -47,7 +47,7 @@ def test_is_fancy(): assert not is_fancy((None, 1)) assert not is_fancy((1, None)) # Chack that actual False returned (rather than falsey) - assert is_fancy(1) == False + assert is_fancy(1) is False def test_canonical_slicers(): diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 94645f2839..db196995e0 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -138,7 +138,7 @@ def test_closest_canonical(): # And a case where the Analyze image has to be flipped img = AnalyzeImage(arr, np.diag([-1, 1, 1, 1])) xyz_img = as_closest_canonical(img) - assert not img is xyz_img + assert img is not xyz_img out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) @@ -156,7 +156,7 @@ def test_closest_canonical(): img = Nifti1Image(arr, np.diag([-1, 1, 1, 1])) img.header.set_dim_info(0, 1, 2) xyz_img = as_closest_canonical(img) - assert not img is xyz_img + assert img is not xyz_img assert img.header.get_dim_info() == xyz_img.header.get_dim_info() out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) @@ -181,7 +181,7 @@ def test_closest_canonical(): img.header.set_dim_info(0, 1, 2) xyz_img = as_closest_canonical(img) - assert not img is xyz_img + assert img is not xyz_img # Check both the original and new objects assert img.header.get_dim_info() == (0, 1, 2) assert xyz_img.header.get_dim_info() == (0, 2, 1) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 738a3f1969..34725ba186 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -113,10 +113,8 @@ def validate_header(self, imaker, params): def validate_header_deprecated(self, imaker, params): # Check deprecated header API img = imaker() - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) + with pytest.deprecated_call(): hdr = img.get_header() - assert len(w) == 1 assert hdr is img.header def validate_filenames(self, imaker, params): @@ -171,7 +169,7 @@ def validate_no_slicing(self, imaker, params): def validate_get_data_deprecated(self, imaker, params): # Check deprecated header API img = imaker() - with assert_warns(DeprecationWarning): + with pytest.deprecated_call(): data = img.get_data() assert_array_equal(np.asanyarray(img.dataobj), data) @@ -395,10 +393,8 @@ def _check_array_caching(self, imaker, meth_name, caching): def validate_data_deprecated(self, imaker, params): # Check _data property still exists, but raises warning img = imaker() - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") + with pytest.deprecated_call(): assert_data_similar(img._data, params) - assert warns.pop(0).category == DeprecationWarning # Check setting _data raises error fake_data = np.zeros(img.shape).astype(img.get_data_dtype()) with 
pytest.raises(AttributeError): @@ -502,10 +498,8 @@ def validate_affine(self, imaker, params): def validate_affine_deprecated(self, imaker, params): # Check deprecated affine API img = imaker() - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) + with pytest.deprecated_call(): assert_almost_equal(img.get_affine(), params['affine'], 6) - assert len(w) == 1 assert img.get_affine().dtype == np.float64 aff = img.get_affine() aff[0, 0] = 1.5 diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 5013828757..226feee526 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -380,9 +380,6 @@ def test_inv_ornt_aff(): def test_orientation_affine_deprecation(): aff1 = inv_ornt_aff([[0, 1], [1, -1], [2, 1]], (3, 4, 5)) - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') + with pytest.deprecated_call(): aff2 = orientation_affine([[0, 1], [1, -1], [2, 1]], (3, 4, 5)) - assert len(warns) == 1 - assert warns[0].category == DeprecationWarning assert_array_equal(aff1, aff2) diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index bb0888b0e7..54c15fde6b 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -263,10 +263,8 @@ def test_affine_regression(): def test_get_voxel_size_deprecated(): hdr = PARRECHeader(HDR_INFO, HDR_DEFS) - with clear_and_catch_warnings(modules=[parrec], record=True) as wlist: - simplefilter('always') + with pytest.deprecated_call(): hdr.get_voxel_size() - assert wlist[0].category == DeprecationWarning def test_get_sorted_slice_indices(): @@ -304,9 +302,9 @@ def test_sorting_dual_echo_T1(): sorted_echos = t1_hdr.image_defs['echo number'][sorted_indices] n_half = len(t1_hdr.image_defs) // 2 # first half (volume 1) should all correspond to echo 1 - assert np.all(sorted_echos[:n_half] == 1) == True + assert np.all(sorted_echos[:n_half] == 1) # second half (volume 2) should all correspond to echo 2 - assert np.all(sorted_echos[n_half:] == 2) == True + assert np.all(sorted_echos[n_half:] == 2) # check volume labels vol_labels = t1_hdr.get_volume_labels() @@ -350,10 +348,10 @@ def test_sorting_multiple_echos_and_contrasts(): assert (np.all(sorted_echos[istart:iend] == current_echo) == True) # outermost sort index is image_type_mr - assert np.all(sorted_types[:ntotal//4] == 0) == True - assert np.all(sorted_types[ntotal//4:ntotal//2] == 1) == True - assert np.all(sorted_types[ntotal//2:3*ntotal//4] == 2) == True - assert np.all(sorted_types[3*ntotal//4:ntotal] == 3) == True + assert np.all(sorted_types[:ntotal//4] == 0) + assert np.all(sorted_types[ntotal//4:ntotal//2] == 1) + assert np.all(sorted_types[ntotal//2:3*ntotal//4] == 2) + assert np.all(sorted_types[3*ntotal//4:ntotal] == 3) # check volume labels vol_labels = t1_hdr.get_volume_labels() diff --git a/nibabel/tests/test_parrec_data.py b/nibabel/tests/test_parrec_data.py index 0cd92bd13f..30295a269a 100644 --- a/nibabel/tests/test_parrec_data.py +++ b/nibabel/tests/test_parrec_data.py @@ -60,8 +60,7 @@ def test_fieldmap(): fieldmap_nii = pjoin(BALLS, 'NIFTI', 'fieldmap.nii.gz') load(fieldmap_par) top_load(fieldmap_nii) - # TODO dj: i believe this shouldn't be here - #raise pytest.skip('Fieldmap remains puzzling') + raise pytest.skip('Fieldmap remains puzzling') @needs_nibabel_data('parrec_oblique') diff --git a/nibabel/tests/test_recoder.py b/nibabel/tests/test_recoder.py index 28eba8860b..8e4edd1b87 100644 --- 
a/nibabel/tests/test_recoder.py +++ b/nibabel/tests/test_recoder.py @@ -22,14 +22,14 @@ def test_recoder_1(): assert rc.code[1] == 1 assert rc.code[2] == 2 with pytest.raises(KeyError): - rc.code.__getitem__(3) + rc.code[3] def test_recoder_2(): # with explicit name for code codes = ((1,), (2,)) rc = Recoder(codes, ['code1']) with pytest.raises(AttributeError): - rc.__getattribute__('code') + rc.code assert rc.code1[1] == 1 assert rc.code1[2] == 2 @@ -41,20 +41,20 @@ def test_recoder_3(): assert rc.code[1] == 1 assert rc.code[2] == 2 with pytest.raises(KeyError): - rc.code.__getitem__(3) + rc.code[3] assert rc.code['one'] == 1 assert rc.code['two'] == 2 with pytest.raises(KeyError): - rc.code.__getitem__('three') + rc.code['three'] with pytest.raises(AttributeError): - rc.__getattribute__('label') + rc.label def test_recoder_3(): # with explicit column names codes = ((1, 'one'), (2, 'two')) rc = Recoder(codes, ['code1', 'label']) with pytest.raises(AttributeError): - rc.__getattribute__('code') + rc.code assert rc.code1[1] == 1 assert rc.code1['one'] == 1 assert rc.label[1] == 'one' @@ -119,7 +119,7 @@ def test_add_codes(): rc = Recoder(codes) assert rc.code['two'] == 2 with pytest.raises(KeyError): - rc.code.__getitem__('three') + rc.code['three'] rc.add_codes(((3, 'three'), (1, 'number 1'))) assert rc.code['three'] == 3 assert rc.code['number 1'] == 1 @@ -151,7 +151,7 @@ def test_dtmapper(): # dict-like that will lookup on dtypes, even if they don't hash properly d = DtypeMapper() with pytest.raises(KeyError): - d.__getitem__(1) + d[1] d[1] = 'something' assert d[1] == 'something' assert list(d.keys()) == [1] @@ -182,7 +182,7 @@ def test_dtmapper(): sw_dt = canonical_dt.newbyteorder(swapped_code) d[sw_dt] = 'spam' with pytest.raises(KeyError): - d.__getitem__(canonical_dt) + d[canonical_dt] assert d[sw_dt] == 'spam' sw_intp_dt = intp_dt.newbyteorder(swapped_code) assert d[sw_intp_dt] == 'spam' diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index ec335e5c24..db901adb61 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -159,35 +159,33 @@ def test_array_file_scales(in_type, out_type, err): # Max rounding error for integer type max_miss = slope / 2. 
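# The conversions in PATCH 558 lean on two pytest idioms. A minimal,
# self-contained sketch of both (hypothetical example code, not taken
# from nibabel): pytest.deprecated_call() is a context manager that
# fails unless the enclosed block emits DeprecationWarning or
# PendingDeprecationWarning, replacing the manual
# clear_and_catch_warnings() bookkeeping, while pytest.raises(...) as a
# context manager lets natural syntax such as d['key'] stand in for
# explicit d.__getitem__('key') calls.

import warnings

import pytest


def old_api():
    # Hypothetical deprecated function, used only for this sketch
    warnings.warn('old_api is deprecated', DeprecationWarning)
    return 42


def test_old_api_warns():
    # Fails if no DeprecationWarning / PendingDeprecationWarning is raised
    with pytest.deprecated_call():
        assert old_api() == 42


def test_missing_key():
    d = {'one': 1}
    with pytest.raises(KeyError):
        d['three']  # reads better than d.__getitem__('three')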
assert np.all(np.abs(arr - arr3) <= max_miss) - bio.truncate(0) - bio.seek(0) -@pytest.mark.parametrize("category0, category1",[ +@pytest.mark.parametrize("in_type, out_type",[ ('int', 'int'), ('uint', 'int'), ]) -def test_scaling_in_abstract(category0, category1): +def test_scaling_in_abstract(in_type, out_type): # Confirm that, for all ints and uints as input, and all possible outputs, # for any simple way of doing the calculation, the result is near enough - for in_type in np.sctypes[category0]: - for out_type in np.sctypes[category1]: - check_int_a2f(in_type, out_type) + for in_tp in np.sctypes[in_type]: + for out_tp in np.sctypes[out_type]: + check_int_a2f(in_tp, out_tp) -@pytest.mark.parametrize("category0, category1", [ +@pytest.mark.parametrize("in_type, out_type", [ ('float', 'int'), ('float', 'uint'), ('complex', 'int'), ('complex', 'uint'), ]) -def test_scaling_in_abstract_warn(category0, category1): +def test_scaling_in_abstract_warn(in_type, out_type): # Converting floats to integer - for in_type in np.sctypes[category0]: - for out_type in np.sctypes[category1]: + for in_tp in np.sctypes[in_type]: + for out_tp in np.sctypes[out_type]: with suppress_warnings(): # overflow - check_int_a2f(in_type, out_type) + check_int_a2f(in_tp, out_tp) def check_int_a2f(in_type, out_type): diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 5959057fc9..3de2af99dd 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -384,7 +384,7 @@ def test_get_data(self): "documentation for caveats) or slicing image array data " "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") assert in_data is img.dataobj - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): out_data = img.get_data() assert in_data is out_data # and that uncache has no effect @@ -397,18 +397,18 @@ def test_get_data(self): rt_img = bytesio_round_trip(img) assert not in_data is rt_img.dataobj assert (rt_img.dataobj == in_data).all() - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): out_data = rt_img.get_data() assert (out_data == in_data).all() assert not rt_img.dataobj is out_data # cache - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): assert rt_img.get_data() is out_data out_data[:] = 42 rt_img.uncache() - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): assert not rt_img.get_data() is out_data - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): assert (rt_img.get_data() == in_data).all() def test_slicer(self): @@ -531,12 +531,12 @@ def test_slicer(self): pass else: sliced_data = in_data[sliceobj] - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): assert (sliced_data == sliced_img.get_data()).all() assert (sliced_data == sliced_img.get_fdata()).all() assert (sliced_data == sliced_img.dataobj).all() assert (sliced_data == img.dataobj[sliceobj]).all() - with pytest.warns(DeprecationWarning): + with pytest.deprecated_call(): assert (sliced_data == img.get_data()[sliceobj]).all() assert (sliced_data == img.get_fdata()[sliceobj]).all() @@ -648,12 +648,7 @@ def test_load_mmap(self): def test_header_deprecated(): - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) - + with pytest.deprecated_call(): class MyHeader(Header): pass - assert len(w) == 0 - - MyHeader() - assert len(w) == 1 + MyHeader() \ No newline at end of file diff --git a/nibabel/tests/test_spm99analyze.py 
b/nibabel/tests/test_spm99analyze.py index e1d559878a..331238db5c 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -148,8 +148,7 @@ def test_origin_checks(self): assert (message == 'very large origin values ' 'relative to dims; leaving as set, ' 'ignoring for affine') - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) + pytest.raises(*raiser) # diagnose binary block dxer = self.header_class.diagnose_binaryblock assert (dxer(hdr.binaryblock) == From c5ccb2eb8da75ac4c5d27819320126931bf621ef Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 559/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 5122e2d410d0b86c2bc48a15dfa06fc6d511128d Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 560/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From a67a9c1cbd95e3aecb6ae114aae4f6b59938d2c7 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 561/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 7013d72066f5e0a7c98c4ad2a4843421102c6792 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 562/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def 
needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 857bed6140b370d0f0f686c2ac00551e15b9caa6 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 563/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 1a245250b6e220c575caba0e68789d425426b0cc Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 564/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From fd2ddde0e2b5d5c883dd6ef10901f07a8ec4852d Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 565/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 920b30f72ccf85c6f3794df55a0fbbbaf69bd2d5 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 566/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py 
b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From f331d20653ec820c83927194250a4fea66305cf5 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 567/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 975178477ddb33de93a9b8ef2a2eee8066ab4f84 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 568/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 29da7b06d6c74f27b03599faf5fa0786edfe7862 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 569/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From f42da0401d093a1dee51faf9a0756406e5475c92 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 570/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = 
pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From dff77ba68d189e3280f436acbc531f56d7d86e11 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 571/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 61bc71498a938042f241c7f2b2c270cfac9c81d0 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 572/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 338b4c3734f14ec52bbd729fe4d3d9e2b66f568f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 573/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..4bc2957c4a 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 2d799c4b7fec1caa2551b6f3a023f64e1d6ac952 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 574/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 
+--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 436404f0dc0d06ecf7880713c4a83636f9644b1b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 575/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 831e553fdb630b0e457155ecfcaff11c390a8e77 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 576/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 4bc2957c4a..753eeb5e15 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 099e5cf9d912f742b5776b9b7a36efc532680355 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 577/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 91be3bfec2a8b724a96b81fb1d401e6793ddd332 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 15:56:41 -0500 Subject: [PATCH 578/689] converting nibabel_data --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 753eeb5e15..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ 
b/nibabel/tests/nibabel_data.py @@ -46,4 +46,4 @@ def needs_nibabel_data(subdir=None): # Path should not be empty (as is the case for not-updated submodules) have_files = exists(required_path) and len(listdir(required_path)) > 0 return pytest.mark.skipif(not have_files, - reason="Need files in {0} for these tests".format(required_path)) + reason="Need files in {0} for these tests".format(required_path)) \ No newline at end of file From 4becc96b8e6d43e814bf0bb79bf8855942498087 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 579/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From ddb21c3b83a35ca2bf7d7baea5b8d90cc707b09c Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 580/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 5ca8a74afb126dfd2d0414badd410d2f404ec9ff Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 581/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 93732925c832571d0d0d496768d8c45662ecf68f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 582/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ 
b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 20dcc697702b102895e029ed5fe080dbfcae2f2f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 583/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From f6f5e0421ccf1c9bc2ccd2e13e827c23c15a3ff5 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 584/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 13bacafc3c5611ee92f02296216237a8931df706 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 585/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 525b99d68b6433a02a9dce3aca01d60812abbe05 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 586/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- 
a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 82c39a1bc395f091490238e28ad45772df732efb Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 587/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From ff899a373c6121633bb0c61bd26a2a6435cc9106 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 588/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 9daf10f15726856ff5b9bd366cc49e5a8d2e364e Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 589/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From b1d8cfb1c47281293788c41ff9ef28a0bbfd2767 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 590/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py 
b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 75f2ce667db957d7b710b9f8f510866221485b7c Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 591/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 683a3ee8e284ff0156c4d4dd91d262e4d25a2464 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 592/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 39c011e5cdcfcd807d23177289f8a32feddfbf51 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 593/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From d5e5907abd2c85bfbff8de2ebc929cb91d25f319 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 594/689] converting more tests nibabel.tests.test_s* --- nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff 
--git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From f7b49f38601b42276b8cc0445fd58a3130729ac4 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 595/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From cdf6e182a2dfca3611a63b1cdc32e1a036b64fd5 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 596/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 8ba8134a023850d27da6f632b5a07bc138b0a0f8 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 597/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From e86066942737afe3e1267b7da675dbb74186c86d Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 598/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files 
changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 32c1c1739fa6c3fa4e2655a1aa88750b0adb1626 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 599/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 8a9f53d2e92c58fae73ac2825520e64aef2aad9f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 600/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 50e7d98b0a72342b1b5a11067a2982ec61529188 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 601/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From d4f2044bd791062501042c272812b27f28e57ff0 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 14:50:03 -0500 Subject: [PATCH 602/689] converting more tests nibabel.tests.test_s* --- 
nibabel/tests/test_spaces.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index a653082f78..7f8ae94c51 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,7 +76,15 @@ def get_outspace_params(): ) -def test_vox2out_vox(): +@pytest.mark.parametrize("arg_tuple", [ + # Enforce number of axes + ((2, 3, 4, 5), np.eye(4)), + ((2, 3, 4, 5, 6), np.eye(4)), + # Voxel sizes must be positive + ((2, 3, 4), np.eye(4), [-1, 1, 1]), + ((2, 3, 4), np.eye(4), [1, 0, 1]) +]) +def test_vox2out_vox(arg_tuple): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 547ba9265415619486ac1a770cf4714e73690d70 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 603/689] removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From d8a15afcf70d7aeafed9ea558c261bf762a1731b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:45:57 -0500 Subject: [PATCH 604/689] converting nibabel/tests/test_scripts.py --- nibabel/tests/test_scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..db83a4d3f5 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') +@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From 4e32787f09c89a78f0af18aa5ab71f273ef20b3d Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 11:46:26 -0500 Subject: [PATCH 605/689] adding todo --- nibabel/tests/nibabel_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..89a1e1c9b2 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' - +# dj-TODO def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data From 59af0aa0718c49131895aec9d8307714eaca8fae Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Feb 2020 16:04:42 -0500 Subject: [PATCH 606/689] 
removing todos --- nibabel/tests/nibabel_data.py | 2 +- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index 89a1e1c9b2..e2e5bc9ed3 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -22,7 +22,7 @@ def get_nibabel_data(): nibabel_data = pjoin(containing_path, 'nibabel-data') return nibabel_data if isdir(nibabel_data) else '' -# dj-TODO + def needs_nibabel_data(subdir=None): """ Decorator for tests needing nibabel-data diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index db83a4d3f5..873059b510 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -297,7 +297,7 @@ def test_parrec2nii(): @script_test -@needs_nibabel_data('nitest-balls1') #dj-TODO: together eith nibabel_data +@needs_nibabel_data('nitest-balls1') def test_parrec2nii_with_data(): # Use nibabel-data to test conversion # Premultiplier to relate our affines to Philips conversion From a21c0834188b9a5708f6e82cc885513a1ac46275 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 10:36:20 -0500 Subject: [PATCH 607/689] small edits: suggestions from reviews --- nibabel/tests/test_spaces.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/nibabel/tests/test_spaces.py b/nibabel/tests/test_spaces.py index 7f8ae94c51..a653082f78 100644 --- a/nibabel/tests/test_spaces.py +++ b/nibabel/tests/test_spaces.py @@ -76,15 +76,7 @@ def get_outspace_params(): ) -@pytest.mark.parametrize("arg_tuple", [ - # Enforce number of axes - ((2, 3, 4, 5), np.eye(4)), - ((2, 3, 4, 5, 6), np.eye(4)), - # Voxel sizes must be positive - ((2, 3, 4), np.eye(4), [-1, 1, 1]), - ((2, 3, 4), np.eye(4), [1, 0, 1]) -]) -def test_vox2out_vox(arg_tuple): +def test_vox2out_vox(): # Test world space bounding box # Test basic case, identity, no voxel sizes passed shape, aff = vox2out_vox(((2, 3, 4), np.eye(4))) From 8d929771fa3e7d802abf3b333d9cac545971079c Mon Sep 17 00:00:00 2001 From: orduek Date: Wed, 5 Feb 2020 16:49:25 -0500 Subject: [PATCH 608/689] fixes to assert numpy --- nibabel/freesurfer/tests/test_io.py | 16 +++++++++------- nibabel/freesurfer/tests/test_mghformat.py | 6 +++--- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 77a8bc12a0..1ca5027bf9 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -61,8 +61,9 @@ def test_geometry(): assert 0 == faces.min() assert coords.shape[0] == (faces.max() + 1) assert 9 == len(volume_info) - assert [2, 0, 20] == volume_info['head'] - assert ['created by greve on Thu Jun 8 19:17:51 2006'] == create_stamp + # assert np.array_equal([2, 0, 20],volume_info['head']) + np.testing.assert_array_equal([2, 0, 20],volume_info['head']) + assert create_stamp == ['created by greve on Thu Jun 8 19:17:51 2006'] # this creates assertion error - should we just remove it? 
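# PATCH 608 swaps bare equality asserts on arrays for numpy's testing
# helpers. A minimal sketch of why (hypothetical values, not the real
# test data): comparing a sequence against an ndarray with == is
# element-wise, so the result is a boolean array whose truth value is
# ambiguous, and a bare assert raises ValueError instead of passing or
# failing cleanly.

import numpy as np

head = np.array([2, 0, 20])

# assert [2, 0, 20] == head   # ValueError: truth value ... is ambiguous
assert np.array_equal([2, 0, 20], head)          # reduces to one bool
np.testing.assert_array_equal([2, 0, 20], head)  # richer failure output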
# Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -79,7 +80,8 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) - assert volume_info2['cras'] == volume_info['cras'] + #assert.array_equal(volume_info2['cras'], volume_info['cras']) + np.testing.assert_array_equal(volume_info2['cras'], volume_info['cras']) with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') @@ -126,8 +128,8 @@ def test_quad_geometry(): new_path = 'test' write_geometry(new_path, coords, faces) coords2, faces2 = read_geometry(new_path) - assert coords == coords2 - assert faces == faces2 + assert np.array_equal(coords,coords2) + assert np.array_equal(faces, faces2) @freesurfer_test @@ -141,7 +143,7 @@ def test_morph_data(): new_path = 'test' write_morph_data(new_path, curv) curv2 = read_morph_data(new_path) - assert curv2 == curv + assert np.array_equal (curv2, curv) def test_write_morph_data(): @@ -154,7 +156,7 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape - assert values == read_morph_data('test.curv') + assert np.array_equal(read_morph_data('test.curv') ,values) with pytest.raises(ValueError): write_morph_data('test.curv', np.zeros(shape), big_num) # Windows 32-bit overflows Python int diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 37b2faa2b4..41bffbf6ac 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,7 +26,7 @@ import pytest -from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) +from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_almost_equal from ...testing_pytest import data_path @@ -218,14 +218,14 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert hdr['delta'] == 1 + assert_array_equal(hdr['delta'], 1) assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() mgz2 = _mgh_rt(mgz, img_fobj) hdr2 = mgz2.header assert_almost_equal(hdr2.get_affine(), exp_aff, 6) - assert hdr2['delta'] == 1 + assert_array_equal(hdr2['delta'],1) # Change affine, change underlying header info exp_aff_d = exp_aff.copy() exp_aff_d[0, -1] = -14 From ae39e06629dfe792745dd97b71461e85d0fad887 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 15:35:14 -0500 Subject: [PATCH 609/689] changed nosetools assert_true to assert in test_io.py --- nibabel/freesurfer/tests/test_io.py | 35 +++++++++++++++-------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index fc926ee2af..41bcdd17cf 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -9,7 +9,8 @@ from ...tmpdirs import InTemporaryDirectory -from nose.tools import assert_true + +import pytest import numpy as np from numpy.testing import assert_equal, assert_raises, dec, assert_allclose, assert_array_equal @@ -92,13 +93,13 @@ def test_geometry(): with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) 
read_geometry(surf_path, read_metadata=True) - assert_true(any('volume information contained' in str(ww.message) + assert(any('volume information contained' in str(ww.message) for ww in w)) - assert_true(any('extension code' in str(ww.message) for ww in w)) + assert(any('extension code' in str(ww.message) for ww in w)) volume_info['head'] = [1, 2] with clear_and_catch_warnings() as w: write_geometry(surf_path, coords, faces, create_stamp, volume_info) - assert_true(any('Unknown extension' in str(ww.message) for ww in w)) + assert(any('Unknown extension' in str(ww.message) for ww in w)) volume_info['a'] = 0 assert_raises(ValueError, write_geometry, surf_path, coords, faces, create_stamp, volume_info) @@ -137,8 +138,8 @@ def test_morph_data(): """Test IO of morphometry data file (eg. curvature).""" curv_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "curv")) curv = read_morph_data(curv_path) - assert_true(-1.0 < curv.min() < 0) - assert_true(0 < curv.max() < 1.0) + assert(-1.0 < curv.min() < 0) + assert(0 < curv.max() < 1.0) with InTemporaryDirectory(): new_path = 'test' write_morph_data(new_path, curv) @@ -177,8 +178,8 @@ def test_annot(): hash_ = _hash_file_content(annot_path) labels, ctab, names = read_annot(annot_path) - assert_true(labels.shape == (163842, )) - assert_true(ctab.shape == (len(names), 5)) + assert(labels.shape == (163842, )) + assert(ctab.shape == (len(names), 5)) labels_orig = None if a == 'aparc': @@ -186,9 +187,9 @@ def test_annot(): np.testing.assert_array_equal(labels == -1, labels_orig == 0) # Handle different version of fsaverage if hash_ == 'bf0b488994657435cdddac5f107d21e8': - assert_true(np.sum(labels_orig == 0) == 13887) + assert(np.sum(labels_orig == 0) == 13887) elif hash_ == 'd4f5b7cbc2ed363ac6fcf89e19353504': - assert_true(np.sum(labels_orig == 1639705) == 13327) + assert(np.sum(labels_orig == 1639705) == 13327) else: raise RuntimeError("Unknown freesurfer file. 
Please report " "the problem to the maintainer of nibabel.") @@ -272,7 +273,7 @@ def test_write_annot_fill_ctab(): print(labels) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert_true( + assert( any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path, orig_ids=True) @@ -288,7 +289,7 @@ def test_write_annot_fill_ctab(): rgbal[:, 2] * (2 ** 16)) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert_true( + assert( not any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path) @@ -348,13 +349,13 @@ def test_label(): label_path = pjoin(data_path, "label", "lh.cortex.label") label = read_label(label_path) # XXX : test more - assert_true(label.min() >= 0) - assert_true(label.max() <= 163841) - assert_true(label.shape[0] <= 163842) + assert(label.min() >= 0) + assert(label.max() <= 163841) + assert(label.shape[0] <= 163842) labels, scalars = read_label(label_path, True) - assert_true(np.all(labels == label)) - assert_true(len(labels) == len(scalars)) + assert(np.all(labels == label)) + assert(len(labels) == len(scalars)) def test_write_annot_maxstruct(): From 665b28ae6f15a4f6698571fc1a263a500d5605f5 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 15:59:43 -0500 Subject: [PATCH 610/689] changed nosetools assertion to pytest in test_mghformat.py --- nibabel/freesurfer/tests/test_mghformat.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 289acbcd01..25a7254def 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -23,7 +23,8 @@ from ...wrapstruct import WrapStructError from ... 
import imageglobals -from nose.tools import assert_true, assert_false + +import pytest from numpy.testing import (assert_equal, assert_array_equal, assert_array_almost_equal, assert_almost_equal, @@ -260,7 +261,7 @@ def test_eq(): hdr2 = MGHHeader() assert_equal(hdr, hdr2) hdr.set_data_shape((2, 3, 4)) - assert_false(hdr == hdr2) + assert(hdr != hdr2) hdr2.set_data_shape((2, 3, 4)) assert_equal(hdr, hdr2) @@ -287,7 +288,7 @@ def test_mgh_load_fileobj(): bio = io.BytesIO(contents) fm = MGHImage.make_file_map(mapping=dict(image=bio)) img2 = MGHImage.from_file_map(fm) - assert_true(img2.dataobj.file_like is bio) + assert(img2.dataobj.file_like is bio) assert_array_equal(img.get_fdata(), img2.get_fdata()) @@ -477,7 +478,7 @@ def test_as_byteswapped(self): # same code just returns a copy for endianness in BIG_CODES: hdr2 = hdr.as_byteswapped(endianness) - assert_false(hdr2 is hdr) + assert(hdr2 is not hdr) assert_equal(hdr2, hdr) # Different code raises error From 441e4fe7d6b99687122a6736373ef0d30ca90da5 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:07:58 -0500 Subject: [PATCH 611/689] changed from numpy raises to pytest.raises and skipif --- nibabel/freesurfer/tests/test_io.py | 65 ++++++++++++++--------------- 1 file changed, 31 insertions(+), 34 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 41bcdd17cf..cde2edeb5f 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -12,7 +12,7 @@ import pytest import numpy as np -from numpy.testing import assert_equal, assert_raises, dec, assert_allclose, assert_array_equal +from numpy.testing import assert_allclose, assert_array_equal from .. import (read_geometry, read_morph_data, read_annot, read_label, write_geometry, write_morph_data, write_annot) @@ -20,7 +20,7 @@ from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data from ...fileslice import strided_scalar -from ...testing import clear_and_catch_warnings +from ...testing_pytest import clear_and_catch_warnings DATA_SDIR = 'fsaverage' @@ -36,10 +36,7 @@ data_path = pjoin(nib_data, 'nitest-freesurfer', DATA_SDIR) have_freesurfer = isdir(data_path) -freesurfer_test = dec.skipif( - not have_freesurfer, - 'cannot find freesurfer {0} directory'.format(DATA_SDIR)) - +freesurfer_test = pytest.mark.skipif(not have_freesurfer, reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) def _hash_file_content(fname): hasher = hashlib.md5() @@ -54,19 +51,18 @@ def test_geometry(): """Test IO of .surf""" surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "inflated")) coords, faces = read_geometry(surf_path) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) + assert 0==faces.min() + assert coords.shape[0]== faces.max() + 1 surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "sphere")) coords, faces, volume_info, create_stamp = read_geometry( surf_path, read_metadata=True, read_stamp=True) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) - assert_equal(9, len(volume_info)) - assert_equal([2, 0, 20], volume_info['head']) - assert_equal(u'created by greve on Thu Jun 8 19:17:51 2006', - create_stamp) + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 + assert 9 == len(volume_info) + assert [2, 0, 20] == volume_info['head'] + assert 'created by greve on Thu Jun 8 19:17:51 2006' == create_stamp # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ 
-83,7 +79,7 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) - assert_equal(volume_info2['cras'], volume_info['cras']) + assert volume_info2['cras'] == volume_info['cras'] with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') @@ -101,10 +97,11 @@ def test_geometry(): write_geometry(surf_path, coords, faces, create_stamp, volume_info) assert(any('Unknown extension' in str(ww.message) for ww in w)) volume_info['a'] = 0 - assert_raises(ValueError, write_geometry, surf_path, coords, - faces, create_stamp, volume_info) + with pytest.raises(ValueError): + write_geometry(surf_path, coords, faces, create_stamp, volume_info) + - assert_equal(create_stamp, read_create_stamp) + assert create_stamp == read_create_stamp np.testing.assert_array_equal(coords, coords2) np.testing.assert_array_equal(faces, faces2) @@ -123,14 +120,14 @@ def test_quad_geometry(): new_quad = pjoin(get_nibabel_data(), 'nitest-freesurfer', 'subjects', 'bert', 'surf', 'lh.inflated.nofix') coords, faces = read_geometry(new_quad) - assert_equal(0, faces.min()) - assert_equal(coords.shape[0], faces.max() + 1) + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 with InTemporaryDirectory(): new_path = 'test' write_geometry(new_path, coords, faces) coords2, faces2 = read_geometry(new_path) - assert_equal(coords, coords2) - assert_equal(faces, faces2) + assert coords == coords2 + assert faces == faces2 @freesurfer_test @@ -144,7 +141,7 @@ def test_morph_data(): new_path = 'test' write_morph_data(new_path, curv) curv2 = read_morph_data(new_path) - assert_equal(curv2, curv) + assert curv2 == curv def test_write_morph_data(): @@ -157,17 +154,17 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape - assert_equal(values, read_morph_data('test.curv')) - assert_raises(ValueError, write_morph_data, 'test.curv', - np.zeros(shape), big_num) - # Windows 32-bit overflows Python int + assert values == read_morph_data('test.curv') + with pytest.raises(ValueError): + write_morph_data('test.curv', np.zeros(shape), big_num) + # Windows 32-bit overflows Python int if np.dtype(np.int) != np.dtype(np.int32): - assert_raises(ValueError, write_morph_data, 'test.curv', - strided_scalar((big_num,))) + with pytest.raises(ValueError): + write_morph_data('test.curv', strided_scalar((big_num,))) for shape in bad_shapes: - assert_raises(ValueError, write_morph_data, 'test.curv', - values.reshape(shape)) - + with pytest.raises(ValueError): + write_morph_data('test.curv', values.reshape(shape)) + @freesurfer_test def test_annot(): @@ -208,7 +205,7 @@ def test_annot(): if labels_orig is not None: np.testing.assert_array_equal(labels_orig, labels_orig_2) np.testing.assert_array_equal(ctab, ctab2) - assert_equal(names, names2) + assert names == names2 def test_read_write_annot(): @@ -374,4 +371,4 @@ def test_write_annot_maxstruct(): # Check round-trip assert_array_equal(labels, rt_labels) assert_array_equal(rgba, rt_ctab[:, :4]) - assert_equal(names, [n.decode('ascii') for n in rt_names]) + assert names == [n.decode('ascii') for n in rt_names] From f99d92655b2f62cbb3637a0819f73ae88f264a78 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:28:50 -0500 Subject: [PATCH 612/689] Changed test_mghformat.py to pytest --- nibabel/freesurfer/tests/test_mghformat.py | 154 
+++++++++++---------- 1 file changed, 79 insertions(+), 75 deletions(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 25a7254def..3d96aa60f6 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,12 +26,11 @@ import pytest -from numpy.testing import (assert_equal, assert_array_equal, - assert_array_almost_equal, assert_almost_equal, - assert_raises) -from ...testing import assert_not_equal +from numpy.testing import (assert_array_equal, + assert_array_almost_equal, assert_almost_equal) -from ...testing import data_path + +from ...testing_pytest import data_path from ...tests import test_spatialimages as tsi from ...tests.test_wrapstruct import _TestLabeledWrapStruct @@ -68,10 +67,10 @@ def test_read_mgh(): # header h = mgz.header - assert_equal(h['version'], 1) - assert_equal(h['type'], 3) - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 3 + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [3, 4, 5, 2]) assert_almost_equal(h['tr'], 2.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -102,10 +101,10 @@ def test_write_mgh(): # Delete loaded image to allow file deletion by windows del mgz # header - assert_equal(h['version'], 1) - assert_equal(h['type'], 3) - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 3 + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [5, 4, 3, 2]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -132,10 +131,10 @@ def test_write_noaffine_mgh(): # Delete loaded image to allow file deletion by windows del mgz # header - assert_equal(h['version'], 1) - assert_equal(h['type'], 0) # uint8 for mgh - assert_equal(h['dof'], 0) - assert_equal(h['goodRASFlag'], 1) + assert h['version'] == 1 + assert h['type'] == 0 # uint8 for mgh + assert h['dof'] == 0 + assert h['goodRASFlag'] == 1 assert_array_equal(h['dims'], [7, 13, 3, 22]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) @@ -158,7 +157,7 @@ def test_set_zooms(): (1, 1, -1, 1), (1, 1, 1, -1), (1, 1, 1, 1, 5)): - with assert_raises(HeaderDataError): + with pytest.raises(HeaderDataError): h.set_zooms(zooms) # smoke test for tr=0 h.set_zooms((1, 1, 1, 0)) @@ -178,7 +177,8 @@ def bad_dtype_mgh(): def test_bad_dtype_mgh(): # Now test the above function - assert_raises(MGHError, bad_dtype_mgh) + with pytest.raises(MGHError): + bad_dtype_mgh() def test_filename_exts(): @@ -219,14 +219,14 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert_equal(hdr['delta'], 1) + assert hdr['delta'] == 1 assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() mgz2 = _mgh_rt(mgz, img_fobj) hdr2 = mgz2.header assert_almost_equal(hdr2.get_affine(), exp_aff, 6) - assert_equal(hdr2['delta'], 1) + assert hdr2['delta'] == 1 # Change affine, change underlying header info exp_aff_d = exp_aff.copy() exp_aff_d[0, -1] = -14 @@ -259,17 +259,17 @@ def test_eq(): # Test headers compare properly hdr = MGHHeader() hdr2 = MGHHeader() - assert_equal(hdr, hdr2) + assert hdr == hdr2 hdr.set_data_shape((2, 3, 4)) assert(hdr != hdr2) hdr2.set_data_shape((2, 3, 4)) - assert_equal(hdr, hdr2) + assert hdr == hdr2 def 
test_header_slope_inter(): # Test placeholder slope / inter method hdr = MGHHeader() - assert_equal(hdr.get_slope_inter(), (None, None)) + assert hdr.get_slope_inter() == (None, None) def test_mgh_load_fileobj(): @@ -281,7 +281,7 @@ def test_mgh_load_fileobj(): # pass the filename to the array proxy, please feel free to change this # test. img = MGHImage.load(MGZ_FNAME) - assert_equal(img.dataobj.file_like, MGZ_FNAME) + assert img.dataobj.file_like == MGZ_FNAME # Check fileobj also passed into dataobj with ImageOpener(MGZ_FNAME) as fobj: contents = fobj.read() @@ -296,7 +296,7 @@ def test_mgh_affine_default(): hdr = MGHHeader() hdr['goodRASFlag'] = 0 hdr2 = MGHHeader(hdr.binaryblock) - assert_equal(hdr2['goodRASFlag'], 1) + assert hdr2['goodRASFlag'] == 1 assert_array_equal(hdr['Mdc'], hdr2['Mdc']) assert_array_equal(hdr['Pxyz_c'], hdr2['Pxyz_c']) @@ -311,33 +311,33 @@ def test_mgh_set_data_shape(): assert_array_equal(hdr.get_data_shape(), (5, 4, 3)) hdr.set_data_shape((5, 4, 3, 2)) assert_array_equal(hdr.get_data_shape(), (5, 4, 3, 2)) - with assert_raises(ValueError): + with pytest.raises(ValueError): hdr.set_data_shape((5, 4, 3, 2, 1)) def test_mghheader_default_structarr(): hdr = MGHHeader.default_structarr() - assert_equal(hdr['version'], 1) + assert hdr['version'] == 1 assert_array_equal(hdr['dims'], 1) - assert_equal(hdr['type'], 3) - assert_equal(hdr['dof'], 0) - assert_equal(hdr['goodRASFlag'], 1) + assert hdr['type'] == 3 + assert hdr['dof'] == 0 + assert hdr['goodRASFlag'] == 1 assert_array_equal(hdr['delta'], 1) assert_array_equal(hdr['Mdc'], [[-1, 0, 0], [0, 0, 1], [0, -1, 0]]) assert_array_equal(hdr['Pxyz_c'], 0) - assert_equal(hdr['tr'], 0) - assert_equal(hdr['flip_angle'], 0) - assert_equal(hdr['te'], 0) - assert_equal(hdr['ti'], 0) - assert_equal(hdr['fov'], 0) + assert hdr['tr'] ==0 + assert hdr['flip_angle'] == 0 + assert hdr['te'] == 0 + assert hdr['ti'] == 0 + assert hdr['fov'] == 0 for endianness in (None,) + BIG_CODES: hdr2 = MGHHeader.default_structarr(endianness=endianness) - assert_equal(hdr2, hdr) - assert_equal(hdr2.newbyteorder('>'), hdr) + assert hdr2 == hdr + assert hdr2.newbyteorder('>') == hdr for endianness in LITTLE_CODES: - with assert_raises(ValueError): + with pytest.raises(ValueError): MGHHeader.default_structarr(endianness=endianness) @@ -352,17 +352,17 @@ def test_deprecated_fields(): hdr['mrparams'] = [1, 2, 3, 4] assert_array_almost_equal(hdr['mrparams'], [1, 2, 3, 4]) - assert_equal(hdr['tr'], 1) - assert_equal(hdr['flip_angle'], 2) - assert_equal(hdr['te'], 3) - assert_equal(hdr['ti'], 4) - assert_equal(hdr['fov'], 0) + assert hdr['tr'] == 1 + assert hdr['flip_angle'] == 2 + assert hdr['te'] == 3 + assert hdr['ti'] == 4 + assert hdr['fov'] == 0 assert_array_almost_equal(hdr_data['mrparams'], [1, 2, 3, 4]) - assert_equal(hdr_data['tr'], 1) - assert_equal(hdr_data['flip_angle'], 2) - assert_equal(hdr_data['te'], 3) - assert_equal(hdr_data['ti'], 4) - assert_equal(hdr_data['fov'], 0) + assert hdr_data['tr'] == 1 + assert hdr_data['flip_angle'] == 2 + assert hdr_data['te'] == 3 + assert hdr_data['ti'] == 4 + assert hdr_data['fov'] == 0 hdr['tr'] = 5 hdr['flip_angle'] = 6 @@ -389,7 +389,7 @@ def check_dtypes(self, expected, actual): # Some images will want dtypes to be equal including endianness, # others may only require the same type # MGH requires the actual to be a big endian version of expected - assert_equal(expected.newbyteorder('>'), actual) + assert expected.newbyteorder('>') == actual class TestMGHHeader(_TestLabeledWrapStruct): @@ -406,9 
+406,9 @@ def test_general_init(self): hdr = self.header_class() # binaryblock has length given by header data dtype binblock = hdr.binaryblock - assert_equal(len(binblock), hdr.structarr.dtype.itemsize) + assert len(binblock) == hdr.structarr.dtype.itemsize # Endianness will always be big, and cannot be set - assert_equal(hdr.endianness, '>') + assert hdr.endianness == '>' # You can also pass in a check flag, without data this has no # effect hdr = self.header_class(check=False) @@ -417,15 +417,15 @@ def test__eq__(self): # Test equal and not equal hdr1 = self.header_class() hdr2 = self.header_class() - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 self._set_something_into_hdr(hdr1) - assert_not_equal(hdr1, hdr2) + assert hdr1 != hdr2 self._set_something_into_hdr(hdr2) - assert_equal(hdr1, hdr2) + assert hdr1 == hdr2 # REMOVED as_byteswapped() test # Check comparing to funny thing says no - assert_not_equal(hdr1, None) - assert_not_equal(hdr1, 1) + assert hdr1 != None + assert hdr1 != 1 def test_to_from_fileobj(self): # Successful write using write_to @@ -434,56 +434,58 @@ def test_to_from_fileobj(self): hdr.write_to(str_io) str_io.seek(0) hdr2 = self.header_class.from_fileobj(str_io) - assert_equal(hdr2.endianness, '>') - assert_equal(hdr2.binaryblock, hdr.binaryblock) + assert hdr2.endianness == '>' + assert hdr2.binaryblock == hdr.binaryblock def test_endian_guess(self): # Check guesses of endian eh = self.header_class() - assert_equal(eh.endianness, '>') - assert_equal(self.header_class.guessed_endian(eh), '>') + assert eh.endianness == '>' + assert self.header_class.guessed_endian(eh) == '>' def test_bytes(self): # Test get of bytes hdr1 = self.header_class() bb = hdr1.binaryblock hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Do a set into the header, and try again. The specifics of 'setting # something' will depend on the nature of the bytes object self._set_something_into_hdr(hdr1) hdr2 = self.header_class(hdr1.binaryblock) - assert_equal(hdr1, hdr2) - assert_equal(hdr1.binaryblock, hdr2.binaryblock) + assert hdr1 == hdr2 + assert hdr1.binaryblock == hdr2.binaryblock # Short binaryblocks give errors (here set through init) # Long binaryblocks are truncated - assert_raises(WrapStructError, - self.header_class, - bb[:self.header_class._hdrdtype.itemsize - 1]) + with pytest.raises(WrapStructError): + self.header_class(bb[:self.header_class._hdrdtype.itemsize - 1]) + # Checking set to true by default, and prevents nonsense being # set into the header. 
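All of the `assert_raises` rewrites in PATCH 612 follow one shape: the callable-plus-arguments form becomes a `with pytest.raises(...)` block wrapping the failing call directly. A hypothetical sketch (`parse_header` is invented for illustration; it stands in for constructors like `self.header_class` above):

import pytest

def parse_header(blob):
    if len(blob) < 4:
        raise ValueError('binary block too short')
    return blob[:4]

# nose/numpy style: assert_raises(ValueError, parse_header, b'\x00')
# The pytest context manager keeps the failing call readable and
# exposes the raised exception for further checks:
with pytest.raises(ValueError) as excinfo:
    parse_header(b'\x00')
assert 'too short' in str(excinfo.value)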
bb_bad = self.get_bad_bb() if bb_bad is None: return with imageglobals.LoggingOutputSuppressor(): - assert_raises(HeaderDataError, self.header_class, bb_bad) + with pytest.raises(HeaderDataError): + self.header_class(bb_bad) + # now slips past without check _ = self.header_class(bb_bad, check=False) def test_as_byteswapped(self): # Check byte swapping hdr = self.header_class() - assert_equal(hdr.endianness, '>') + assert hdr.endianness == '>' # same code just returns a copy for endianness in BIG_CODES: hdr2 = hdr.as_byteswapped(endianness) assert(hdr2 is not hdr) - assert_equal(hdr2, hdr) + assert hdr2 == hdr # Different code raises error for endianness in (None,) + LITTLE_CODES: - with assert_raises(ValueError): + with pytest.raises(ValueError): hdr.as_byteswapped(endianness) # Note that contents is not rechecked on swap / copy class DC(self.header_class): @@ -491,7 +493,9 @@ def check_fix(self, *args, **kwargs): raise Exception # Assumes check=True default - assert_raises(Exception, DC, hdr.binaryblock) + with pytest.raises(Exception): + DC(hdr.binaryblock) + hdr = DC(hdr.binaryblock, check=False) hdr2 = hdr.as_byteswapped('>') @@ -500,8 +504,8 @@ def test_checks(self): hdr_t = self.header_class() # _dxer just returns the diagnostics as a string # Default hdr is OK - assert_equal(self._dxer(hdr_t), '') + assert self._dxer(hdr_t) == '' # Version should be 1 hdr = hdr_t.copy() hdr['version'] = 2 - assert_equal(self._dxer(hdr), 'Unknown MGH format version') + assert self._dxer(hdr) == 'Unknown MGH format version' From 6e1e1c1a81505d671de259b4da58937ef73d6824 Mon Sep 17 00:00:00 2001 From: orduek Date: Tue, 4 Feb 2020 17:55:23 -0500 Subject: [PATCH 613/689] small fixes --- nibabel/freesurfer/tests/test_io.py | 34 +++++++++++----------- nibabel/freesurfer/tests/test_mghformat.py | 3 +- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index cde2edeb5f..77a8bc12a0 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -51,18 +51,18 @@ def test_geometry(): """Test IO of .surf""" surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "inflated")) coords, faces = read_geometry(surf_path) - assert 0==faces.min() - assert coords.shape[0]== faces.max() + 1 + assert 0 == faces.min() + assert coords.shape[0] == faces.max() + 1 surf_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "sphere")) coords, faces, volume_info, create_stamp = read_geometry( surf_path, read_metadata=True, read_stamp=True) assert 0 == faces.min() - assert coords.shape[0] == faces.max() + 1 + assert coords.shape[0] == (faces.max() + 1) assert 9 == len(volume_info) assert [2, 0, 20] == volume_info['head'] - assert 'created by greve on Thu Jun 8 19:17:51 2006' == create_stamp + assert ['created by greve on Thu Jun 8 19:17:51 2006'] == create_stamp # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -121,7 +121,7 @@ def test_quad_geometry(): 'bert', 'surf', 'lh.inflated.nofix') coords, faces = read_geometry(new_quad) assert 0 == faces.min() - assert coords.shape[0] == faces.max() + 1 + assert coords.shape[0] == (faces.max() + 1) with InTemporaryDirectory(): new_path = 'test' write_geometry(new_path, coords, faces) @@ -135,8 +135,8 @@ def test_morph_data(): """Test IO of morphometry data file (eg. 
curvature).""" curv_path = pjoin(data_path, "surf", "%s.%s" % ("lh", "curv")) curv = read_morph_data(curv_path) - assert(-1.0 < curv.min() < 0) - assert(0 < curv.max() < 1.0) + assert -1.0 < curv.min() < 0 + assert 0 < curv.max() < 1.0 with InTemporaryDirectory(): new_path = 'test' write_morph_data(new_path, curv) @@ -175,8 +175,8 @@ def test_annot(): hash_ = _hash_file_content(annot_path) labels, ctab, names = read_annot(annot_path) - assert(labels.shape == (163842, )) - assert(ctab.shape == (len(names), 5)) + assert labels.shape == (163842, ) + assert ctab.shape == (len(names), 5) labels_orig = None if a == 'aparc': @@ -184,9 +184,9 @@ def test_annot(): np.testing.assert_array_equal(labels == -1, labels_orig == 0) # Handle different version of fsaverage if hash_ == 'bf0b488994657435cdddac5f107d21e8': - assert(np.sum(labels_orig == 0) == 13887) + assert np.sum(labels_orig == 0) == 13887 elif hash_ == 'd4f5b7cbc2ed363ac6fcf89e19353504': - assert(np.sum(labels_orig == 1639705) == 13327) + assert np.sum(labels_orig == 1639705) == 13327 else: raise RuntimeError("Unknown freesurfer file. Please report " "the problem to the maintainer of nibabel.") @@ -270,7 +270,7 @@ def test_write_annot_fill_ctab(): print(labels) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert( + assert ( any('Annotation values in {} will be incorrect'.format( annot_path) == str(ww.message) for ww in w)) labels2, rgbal2, names2 = read_annot(annot_path, orig_ids=True) @@ -346,13 +346,13 @@ def test_label(): label_path = pjoin(data_path, "label", "lh.cortex.label") label = read_label(label_path) # XXX : test more - assert(label.min() >= 0) - assert(label.max() <= 163841) - assert(label.shape[0] <= 163842) + assert label.min() >= 0 + assert label.max() <= 163841 + assert label.shape[0] <= 163842 labels, scalars = read_label(label_path, True) - assert(np.all(labels == label)) - assert(len(labels) == len(scalars)) + assert (np.all(labels == label)) + assert len(labels) == len(scalars) def test_write_annot_maxstruct(): diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 3d96aa60f6..37b2faa2b4 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,8 +26,7 @@ import pytest -from numpy.testing import (assert_array_equal, - assert_array_almost_equal, assert_almost_equal) +from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) from ...testing_pytest import data_path From d27b124a97a91534d0068024a577200d73c7aaea Mon Sep 17 00:00:00 2001 From: orduek Date: Wed, 5 Feb 2020 16:49:25 -0500 Subject: [PATCH 614/689] fixes to assert numpy --- nibabel/freesurfer/tests/test_io.py | 16 +++++++++------- nibabel/freesurfer/tests/test_mghformat.py | 6 +++--- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 77a8bc12a0..1ca5027bf9 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -61,8 +61,9 @@ def test_geometry(): assert 0 == faces.min() assert coords.shape[0] == (faces.max() + 1) assert 9 == len(volume_info) - assert [2, 0, 20] == volume_info['head'] - assert ['created by greve on Thu Jun 8 19:17:51 2006'] == create_stamp + # assert np.array_equal([2, 0, 20],volume_info['head']) + np.testing.assert_array_equal([2, 0, 20],volume_info['head']) + assert create_stamp == ['created by greve on Thu Jun 8 
19:17:51 2006'] # this creates assertion error - should we just remove it? # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -79,7 +80,8 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) - assert volume_info2['cras'] == volume_info['cras'] + #assert.array_equal(volume_info2['cras'], volume_info['cras']) + np.testing.assert_array_equal(volume_info2['cras'], volume_info['cras']) with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') @@ -126,8 +128,8 @@ def test_quad_geometry(): new_path = 'test' write_geometry(new_path, coords, faces) coords2, faces2 = read_geometry(new_path) - assert coords == coords2 - assert faces == faces2 + assert np.array_equal(coords,coords2) + assert np.array_equal(faces, faces2) @freesurfer_test @@ -141,7 +143,7 @@ def test_morph_data(): new_path = 'test' write_morph_data(new_path, curv) curv2 = read_morph_data(new_path) - assert curv2 == curv + assert np.array_equal (curv2, curv) def test_write_morph_data(): @@ -154,7 +156,7 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape - assert values == read_morph_data('test.curv') + assert np.array_equal(read_morph_data('test.curv') ,values) with pytest.raises(ValueError): write_morph_data('test.curv', np.zeros(shape), big_num) # Windows 32-bit overflows Python int diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 37b2faa2b4..41bffbf6ac 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -26,7 +26,7 @@ import pytest -from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) +from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_almost_equal from ...testing_pytest import data_path @@ -218,14 +218,14 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert hdr['delta'] == 1 + assert_array_equal(hdr['delta'], 1) assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() mgz2 = _mgh_rt(mgz, img_fobj) hdr2 = mgz2.header assert_almost_equal(hdr2.get_affine(), exp_aff, 6) - assert hdr2['delta'] == 1 + assert_array_equal(hdr2['delta'],1) # Change affine, change underlying header info exp_aff_d = exp_aff.copy() exp_aff_d[0, -1] = -14 From 6890a62840db3c0e04373b839b3862982e19238b Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:18:29 -0500 Subject: [PATCH 615/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 1ca5027bf9..cbd73eb14e 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -36,7 +36,8 @@ data_path = pjoin(nib_data, 'nitest-freesurfer', DATA_SDIR) have_freesurfer = isdir(data_path) -freesurfer_test = pytest.mark.skipif(not have_freesurfer, reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) +freesurfer_test = pytest.mark.skipif(not have_freesurfer, + reason='cannot 
find freesurfer {0} directory'.format(DATA_SDIR)) def _hash_file_content(fname): hasher = hashlib.md5() From b7e8abb7d6bbdd54251f8b705d953407a1863b89 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:18:40 -0500 Subject: [PATCH 616/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index cbd73eb14e..b22ceb6e23 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -60,7 +60,7 @@ def test_geometry(): surf_path, read_metadata=True, read_stamp=True) assert 0 == faces.min() - assert coords.shape[0] == (faces.max() + 1) + assert coords.shape[0] == faces.max() + 1 assert 9 == len(volume_info) # assert np.array_equal([2, 0, 20],volume_info['head']) np.testing.assert_array_equal([2, 0, 20],volume_info['head']) From 5dbe4083cf470eef322a42a4d1835a535971d1b8 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:18:55 -0500 Subject: [PATCH 617/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index b22ceb6e23..144eac6a3d 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -62,7 +62,7 @@ def test_geometry(): assert 0 == faces.min() assert coords.shape[0] == faces.max() + 1 assert 9 == len(volume_info) - # assert np.array_equal([2, 0, 20],volume_info['head']) + assert np.array_equal([2, 0, 20], volume_info['head']) np.testing.assert_array_equal([2, 0, 20],volume_info['head']) assert create_stamp == ['created by greve on Thu Jun 8 19:17:51 2006'] # this creates assertion error - should we just remove it? From 24d96e4d8c12fc0e4c39308e179a6cbfd043de7e Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:19:09 -0500 Subject: [PATCH 618/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 144eac6a3d..c01d079258 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -63,7 +63,6 @@ def test_geometry(): assert coords.shape[0] == faces.max() + 1 assert 9 == len(volume_info) assert np.array_equal([2, 0, 20], volume_info['head']) - np.testing.assert_array_equal([2, 0, 20],volume_info['head']) assert create_stamp == ['created by greve on Thu Jun 8 19:17:51 2006'] # this creates assertion error - should we just remove it? 
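The `freesurfer_test` decorator reformatted in PATCH 615 above shows the standard replacement for `numpy.testing.dec.skipif`: build a `pytest.mark.skipif` mark once from the data probe, then apply it to each data-dependent test. A sketch with a made-up data directory:

import os.path
import pytest

DATA_DIR = '/path/to/test-data'  # hypothetical location
have_data = os.path.isdir(DATA_DIR)
needs_data = pytest.mark.skipif(not have_data,
                                reason='cannot find data directory {0}'.format(DATA_DIR))

@needs_data
def test_uses_data():
    # Always collected; reported as skipped, with the reason, when the data is absent.
    assert os.listdir(DATA_DIR)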
# Test equivalence of freesurfer- and nibabel-generated triangular files From e8965c1a1e33d070acf2b7bcd77d127b2a3219bb Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:19:31 -0500 Subject: [PATCH 619/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index c01d079258..27d5d352a5 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -80,7 +80,7 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) - #assert.array_equal(volume_info2['cras'], volume_info['cras']) + assert np.array_equal(volume_info2['cras'], volume_info['cras']) np.testing.assert_array_equal(volume_info2['cras'], volume_info['cras']) with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) From bbc36d77b7fd6c917a53f32660b481bfc8a18d54 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:19:40 -0500 Subject: [PATCH 620/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 27d5d352a5..9ebc1af138 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -156,7 +156,7 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape - assert np.array_equal(read_morph_data('test.curv') ,values) + assert np.array_equal(read_morph_data('test.curv'), values) with pytest.raises(ValueError): write_morph_data('test.curv', np.zeros(shape), big_num) # Windows 32-bit overflows Python int From e626a38e55a8771dafcaab65a0952c8d272b8128 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:19:48 -0500 Subject: [PATCH 621/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 9ebc1af138..50805c324b 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -81,7 +81,6 @@ def test_geometry(): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) assert np.array_equal(volume_info2['cras'], volume_info['cras']) - np.testing.assert_array_equal(volume_info2['cras'], volume_info['cras']) with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') From 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:19:57 -0500 Subject: [PATCH 622/689] Update nibabel/freesurfer/tests/test_io.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_io.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 50805c324b..b1080a87b0 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -165,7 +165,6 @@ def test_write_morph_data(): for shape in bad_shapes: with pytest.raises(ValueError): write_morph_data('test.curv', values.reshape(shape)) - @freesurfer_test def 
test_annot(): From 30a4639f197cebc53681cbca73b78ff98c03a609 Mon Sep 17 00:00:00 2001 From: orduek Date: Wed, 5 Feb 2020 17:23:23 -0500 Subject: [PATCH 623/689] added test_io to Ignore list in azure and travis --- .azure-pipelines/windows.yml | 3 ++- .travis.yml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 89b47b8345..c7411a77c7 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -99,7 +99,8 @@ jobs: -I test_round_trip ^ -I test_rstutils ^ -I test_scaling ^ - -I test_wrapstruct + -I test_wrapstruct ^ + -I test_io displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | diff --git a/.travis.yml b/.travis.yml index b869d33947..483efe29ec 100644 --- a/.travis.yml +++ b/.travis.yml @@ -186,7 +186,8 @@ script: -I test_round_trip \ -I test_rstutils \ -I test_scaling \ - -I test_wrapstruct + -I test_wrapstruct \ + -I test_io elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing From 165da32f980af96508f5b590a8db4f7796150671 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:32:09 -0500 Subject: [PATCH 624/689] Update nibabel/freesurfer/tests/test_mghformat.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_mghformat.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 41bffbf6ac..03f03b2c18 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -28,7 +28,6 @@ from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_almost_equal - from ...testing_pytest import data_path from ...tests import test_spatialimages as tsi From 54fc107813d82abe71d763ff4a0d35f1f105e174 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Wed, 5 Feb 2020 17:32:20 -0500 Subject: [PATCH 625/689] Update nibabel/freesurfer/tests/test_mghformat.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_mghformat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 03f03b2c18..19c33d30bc 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -323,7 +323,7 @@ def test_mghheader_default_structarr(): assert_array_equal(hdr['delta'], 1) assert_array_equal(hdr['Mdc'], [[-1, 0, 0], [0, 0, 1], [0, -1, 0]]) assert_array_equal(hdr['Pxyz_c'], 0) - assert hdr['tr'] ==0 + assert hdr['tr'] == 0 assert hdr['flip_angle'] == 0 assert hdr['te'] == 0 assert hdr['ti'] == 0 From 27ef4009a5e0c7ec2e5c1b72109315af5ee559ae Mon Sep 17 00:00:00 2001 From: orduek Date: Wed, 5 Feb 2020 17:45:26 -0500 Subject: [PATCH 626/689] fixed style and convention in test_io.py --- nibabel/freesurfer/tests/test_io.py | 57 ++++++++++------------------- 1 file changed, 19 insertions(+), 38 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index b6b1e12fb1..b457a6c00d 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -36,12 +36,8 @@ data_path = pjoin(nib_data, 'nitest-freesurfer', DATA_SDIR) have_freesurfer = isdir(data_path) -<<<<<<< HEAD -freesurfer_test = pytest.mark.skipif(not have_freesurfer, reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) -=======
freesurfer_test = pytest.mark.skipif(not have_freesurfer, reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) ->>>>>>> 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 def _hash_file_content(fname): hasher = hashlib.md5() @@ -64,17 +60,10 @@ def test_geometry(): surf_path, read_metadata=True, read_stamp=True) assert 0 == faces.min() -<<<<<<< HEAD - assert coords.shape[0] == (faces.max() + 1) - assert 9 == len(volume_info) - # assert np.array_equal([2, 0, 20],volume_info['head']) - np.testing.assert_array_equal([2, 0, 20],volume_info['head']) -======= assert coords.shape[0] == faces.max() + 1 assert 9 == len(volume_info) assert np.array_equal([2, 0, 20], volume_info['head']) ->>>>>>> 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 - assert create_stamp == ['created by greve on Thu Jun 8 19:17:51 2006'] # this creates assertion error - should we just remove it? + assert create_stamp == 'created by greve on Thu Jun 8 19:17:51 2006' # Test equivalence of freesurfer- and nibabel-generated triangular files # with respect to read_geometry() @@ -91,12 +80,8 @@ def test_geometry(): for key in ('xras', 'yras', 'zras', 'cras'): assert_allclose(volume_info2[key], volume_info[key], rtol=1e-7, atol=1e-30) -<<<<<<< HEAD - #assert.array_equal(volume_info2['cras'], volume_info['cras']) - np.testing.assert_array_equal(volume_info2['cras'], volume_info['cras']) -======= + assert np.array_equal(volume_info2['cras'], volume_info['cras']) ->>>>>>> 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 with open(surf_path, 'rb') as fobj: np.fromfile(fobj, ">u1", 3) read_create_stamp = fobj.readline().decode().rstrip('\n') @@ -106,28 +91,31 @@ def test_geometry(): with clear_and_catch_warnings() as w: warnings.filterwarnings('always', category=DeprecationWarning) read_geometry(surf_path, read_metadata=True) - assert(any('volume information contained' in str(ww.message) + + assert( + any('volume information contained' in str(ww.message) for ww in w)) - assert(any('extension code' in str(ww.message) for ww in w)) + assert( + any('extension code' in str(ww.message) for ww in w)) volume_info['head'] = [1, 2] with clear_and_catch_warnings() as w: write_geometry(surf_path, coords, faces, create_stamp, volume_info) - assert(any('Unknown extension' in str(ww.message) for ww in w)) + assert( + any('Unknown extension' in str(ww.message) for ww in w)) volume_info['a'] = 0 with pytest.raises(ValueError): write_geometry(surf_path, coords, faces, create_stamp, volume_info) - assert create_stamp == read_create_stamp - np.testing.assert_array_equal(coords, coords2) - np.testing.assert_array_equal(faces, faces2) + assert np.array_equal(coords, coords2) + assert np.array_equal(faces, faces2) # Validate byte ordering coords_swapped = coords.byteswap().newbyteorder() faces_swapped = faces.byteswap().newbyteorder() - np.testing.assert_array_equal(coords_swapped, coords) - np.testing.assert_array_equal(faces_swapped, faces) + assert np.array_equal(coords_swapped, coords) + assert np.array_equal(faces_swapped, faces) @freesurfer_test @@ -171,11 +159,8 @@ def test_write_morph_data(): for shape in okay_shapes: write_morph_data('test.curv', values.reshape(shape)) # Check ordering is preserved, regardless of shape -<<<<<<< HEAD - assert np.array_equal(read_morph_data('test.curv') ,values) -======= assert np.array_equal(read_morph_data('test.curv'), values) ->>>>>>> 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 + with pytest.raises(ValueError): write_morph_data('test.curv', np.zeros(shape), big_num) # Windows 32-bit overflows Python int @@ -185,10 +170,6 @@ def 
test_write_morph_data(): for shape in bad_shapes: with pytest.raises(ValueError): write_morph_data('test.curv', values.reshape(shape)) -<<<<<<< HEAD - -======= ->>>>>>> 2fb977b1605f5a4c4394f3b1b694e0a5e8f66de5 @freesurfer_test def test_annot(): @@ -225,10 +206,10 @@ def test_annot(): if labels_orig is not None: labels_orig_2, _, _ = read_annot(annot_path, orig_ids=True) - np.testing.assert_array_equal(labels, labels2) + assert np.array_equal(labels, labels2) if labels_orig is not None: - np.testing.assert_array_equal(labels_orig, labels_orig_2) - np.testing.assert_array_equal(ctab, ctab2) + assert np.array_equal(labels_orig, labels_orig_2) + assert np.array_equal(ctab, ctab2) assert names == names2 @@ -393,6 +374,6 @@ def test_write_annot_maxstruct(): # Validate the file can be read rt_labels, rt_ctab, rt_names = read_annot(annot_path) # Check round-trip - assert_array_equal(labels, rt_labels) - assert_array_equal(rgba, rt_ctab[:, :4]) + assert np.array_equal(labels, rt_labels) + assert np.array_equal(rgba, rt_ctab[:, :4]) assert names == [n.decode('ascii') for n in rt_names] From fdd26a23810f15b05a3a8c889cc6de7c05a73826 Mon Sep 17 00:00:00 2001 From: orduek Date: Wed, 5 Feb 2020 17:59:50 -0500 Subject: [PATCH 627/689] added more changes to style --- nibabel/freesurfer/tests/test_mghformat.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 19c33d30bc..d70c4e9742 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -103,7 +103,7 @@ def test_write_mgh(): assert h['type'] == 3 assert h['dof'] == 0 assert h['goodRASFlag'] == 1 - assert_array_equal(h['dims'], [5, 4, 3, 2]) + assert np.array_equal(h['dims'], [5, 4, 3, 2]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) assert_almost_equal(h['te'], 0.0) @@ -133,7 +133,7 @@ def test_write_noaffine_mgh(): assert h['type'] == 0 # uint8 for mgh assert h['dof'] == 0 assert h['goodRASFlag'] == 1 - assert_array_equal(h['dims'], [7, 13, 3, 22]) + assert np.array_equal(h['dims'], [7, 13, 3, 22]) assert_almost_equal(h['tr'], 0.0) assert_almost_equal(h['flip_angle'], 0.0) assert_almost_equal(h['te'], 0.0) @@ -217,7 +217,7 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert_array_equal(hdr['delta'], 1) + assert np.array_equal(hdr['delta'], 1) assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() @@ -458,7 +458,7 @@ def test_bytes(self): # Long binaryblocks are truncated with pytest.raises(WrapStructError): self.header_class(bb[:self.header_class._hdrdtype.itemsize - 1]) - + # Checking set to true by default, and prevents nonsense being # set into the header. 
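PATCH 626 settles on `assert np.array_equal(a, b)` in place of `np.testing.assert_array_equal(a, b)` for the round-trip checks. Both are correct; the practical difference, roughly sketched:

import numpy as np

a = np.arange(6).reshape(2, 3)
b = a.copy()

assert np.array_equal(a, b)           # one bool; on failure pytest shows the expression, not an element diff
np.testing.assert_array_equal(a, b)   # on failure prints shapes and the mismatching elements

# np.array_equal also returns False on a shape mismatch instead of raising:
assert not np.array_equal(a, a.ravel())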
bb_bad = self.get_bad_bb() @@ -467,7 +467,7 @@ def test_bytes(self): with imageglobals.LoggingOutputSuppressor(): with pytest.raises(HeaderDataError): self.header_class(bb_bad) - + # now slips past without check _ = self.header_class(bb_bad, check=False) @@ -493,7 +493,7 @@ def check_fix(self, *args, **kwargs): # Assumes check=True default with pytest.raises(Exception): DC(hdr.binaryblock) - + hdr = DC(hdr.binaryblock, check=False) hdr2 = hdr.as_byteswapped('>') From f79de1dc2a371053541157ab12e89cd6a09cb3a4 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 23:51:44 -0500 Subject: [PATCH 628/689] ignoring tests that use needs_nibabel_data --- .azure-pipelines/windows.yml | 3 +++ .travis.yml | 3 +++ 2 files changed, 6 insertions(+) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index f63b5f48c3..3eb6cececb 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -49,10 +49,12 @@ jobs: -I test_batteryrunners ^ -I test_brikhead ^ -I test_casting ^ + -I test_cifti2io_axes ^ -I test_cifti2io_header ^ -I test_data ^ -I test_deprecated ^ -I test_deprecator ^ + -I test_dicomwrappers ^ -I test_dft ^ -I test_ecat ^ -I test_ecat_data ^ @@ -75,6 +77,7 @@ jobs: -I test_image_types ^ -I test_imageclasses ^ -I test_imageglobals ^ + -I test_io ^ -I test_keywordonly ^ -I test_loadsave ^ -I test_minc1 ^ diff --git a/.travis.yml b/.travis.yml index e8b20252f0..76d2535e68 100644 --- a/.travis.yml +++ b/.travis.yml @@ -136,10 +136,12 @@ script: -I test_batteryrunners \ -I test_brikhead \ -I test_casting \ + -I test_cifti2io_axes \ -I test_cifti2io_header \ -I test_data \ -I test_deprecated \ -I test_deprecator \ + -I test_dicomwrappers \ -I test_dft \ -I test_ecat \ -I test_ecat_data \ @@ -162,6 +164,7 @@ script: -I test_image_types \ -I test_imageclasses \ -I test_imageglobals \ + -I test_io \ -I test_keywordonly \ -I test_loadsave \ -I test_minc1 \ From 2539bcd0dd1227497d0ecf6443dc56e43c3ae938 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Feb 2020 23:54:54 -0500 Subject: [PATCH 629/689] some cleaning after using nose2pytest --- nibabel/tests/scriptrunner.py | 2 +- nibabel/tests/test_analyze.py | 2 +- nibabel/tests/test_arraywriters.py | 4 ++-- nibabel/tests/test_casting.py | 4 ++-- nibabel/tests/test_deprecator.py | 18 +++++++++--------- nibabel/tests/test_ecat.py | 2 +- nibabel/tests/test_environment.py | 2 +- nibabel/tests/test_files_interface.py | 8 ++++---- nibabel/tests/test_fileslice.py | 4 ++-- nibabel/tests/test_image_load_save.py | 4 ++-- nibabel/tests/test_nifti1.py | 14 +++++++------- nibabel/tests/test_openers.py | 4 ++-- nibabel/tests/test_optpkg.py | 4 ++-- nibabel/tests/test_parrec.py | 22 +++++++++++----------- nibabel/tests/test_scripts.py | 3 +-- nibabel/tests/test_spatialimages.py | 18 +++++++++--------- nibabel/tests/test_spm99analyze.py | 4 +--- nibabel/tests/test_trackvis.py | 18 ++++++++---------- nibabel/tests/test_wrapstruct.py | 2 +- 19 files changed, 67 insertions(+), 72 deletions(-) diff --git a/nibabel/tests/scriptrunner.py b/nibabel/tests/scriptrunner.py index 33b5e3dcef..0027cc36b2 100644 --- a/nibabel/tests/scriptrunner.py +++ b/nibabel/tests/scriptrunner.py @@ -135,7 +135,7 @@ def run_command(self, cmd, check_code=True): env['PYTHONPATH'] = self.local_module_dir + pathsep + pypath proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env) stdout, stderr = proc.communicate() - if proc.poll() == None: + if proc.poll() is None: proc.terminate() if check_code and proc.returncode != 0: raise RuntimeError( 
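PATCH 629's `== None` → `is None` substitutions follow PEP 8: comparisons to singletons use identity, which is both clearer and immune to types that overload `__eq__`. A short sketch of why it matters, and of one direction the rewrite must not be applied:

import numpy as np

result = None
assert result is None      # preferred; identity test, always a plain bool

arr = np.zeros(3)
print(arr == None)         # element-wise: [False False False], not a single False
assert arr is not None

# The reverse substitution is unsafe for array masks: `mask == False` compares
# element-wise, while `mask is False` is an identity test that is always False.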
diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 14bfb3b5e7..2b2e78dd23 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -512,7 +512,7 @@ def test_from_header(self): for check in (True, False): copy = klass.from_header(hdr, check=check) assert hdr == copy - assert not hdr is copy + assert hdr is not copy class C(object): diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 1a1c4eb156..14e9025c21 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -640,7 +640,7 @@ def test_writer_maker(): assert (aw.slope, aw.inter) == (1, 0) aw.calc_scale() slope, inter = aw.slope, aw.inter - assert not (slope, inter) == (1, 0) + assert (slope, inter) != (1, 0) # Should run by default aw = make_array_writer(arr, np.int16) assert (aw.slope, aw.inter) == (slope, inter) @@ -704,7 +704,7 @@ def test_int_int_slope(): aw = SlopeArrayWriter(arr, out_dt) except ScalingError: continue - assert not aw.slope == 0 + assert aw.slope != 0 arr_back_sc = round_trip(aw) # integer allclose adiff = int_abs(arr - arr_back_sc) diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index 3fe74dfb8b..9006ce321c 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -120,7 +120,7 @@ def test_casting(): # Confirm input array is not modified nans = np.isnan(farr) assert_array_equal(nans, np.isnan(farr_orig)) - assert_array_equal(farr[nans == False], farr_orig[nans == False]) + assert_array_equal(farr[nans is False], farr_orig[nans is False]) # Test scalars work and return scalars assert_array_equal(float_to_int(np.float32(0), np.int16), [0]) # Test scalar nan OK @@ -155,7 +155,7 @@ def test_floor_log2(): assert floor_log2(0.75) == -1 assert floor_log2(0.25) == -2 assert floor_log2(0.24) == -3 - assert floor_log2(0) == None + assert floor_log2(0) is None def test_able_int_type(): diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index eb9de3799d..63d2b32a70 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -70,18 +70,18 @@ def test_dep_func(self): dec = self.dep_func func = dec('foo')(func_no_doc) with pytest.deprecated_call(): - assert func() == None + assert func() is None assert func.__doc__ == 'foo\n' func = dec('foo')(func_doc) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: warnings.simplefilter('always') - assert func(1) == None + assert func(1) is None assert len(w) == 1 assert func.__doc__ == 'A docstring\n\nfoo\n' func = dec('foo')(func_doc_long) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: warnings.simplefilter('always') - assert func(1, 2) == None + assert func(1, 2) is None assert len(w) == 1 assert func.__doc__ == 'A docstring\n \n foo\n \n Some text\n' @@ -90,12 +90,12 @@ def test_dep_func(self): assert func.__doc__ == 'foo\n\n* deprecated from version: 1.1\n' with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: warnings.simplefilter('always') - assert func() == None + assert func() is None assert len(w) == 1 func = dec('foo', until='99.4')(func_no_doc) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: warnings.simplefilter('always') - assert func() == None + assert func() is None assert len(w) == 1 assert (func.__doc__ == 'foo\n\n* Will raise {} as of version: 99.4\n' @@ -127,13 +127,13 @@ def test_dep_func(self): func = dec('foo', warn_class=UserWarning)(func_no_doc) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as 
w: warnings.simplefilter('always') - assert func() == None + assert func() is None assert len(w) == 1 assert w[0].category is UserWarning func = dec('foo', error_class=CustomError)(func_no_doc) with pytest.deprecated_call(): - assert func() == None + assert func() is None func = dec('foo', until='1.8', error_class=CustomError)(func_no_doc) with pytest.raises(CustomError): @@ -150,14 +150,14 @@ def test_deprecator_maker(self): func = dec('foo')(func_no_doc) with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: warnings.simplefilter('always') - assert func() == None + assert func() is None assert len(w) == 1 assert w[0].category is UserWarning dec = self.dep_maker(error_class=CustomError) func = dec('foo')(func_no_doc) with pytest.deprecated_call(): - assert func() == None + assert func() is None func = dec('foo', until='1.8')(func_no_doc) with pytest.raises(CustomError): diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index b681a59b0e..0f216091f2 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -164,7 +164,7 @@ def test_subheader(self): assert self.subhdr.get_nframes() == 1 assert (self.subhdr.get_nframes() == len(self.subhdr.subheaders)) - assert self.subhdr._check_affines() == True + assert self.subhdr._check_affines() is True assert_array_almost_equal(np.diag(self.subhdr.get_frame_affine()), np.array([2.20241979, 2.20241979, 3.125, 1.])) assert self.subhdr.get_zooms()[0] == 2.20241978764534 diff --git a/nibabel/tests/test_environment.py b/nibabel/tests/test_environment.py index e0514c337e..19891a607b 100644 --- a/nibabel/tests/test_environment.py +++ b/nibabel/tests/test_environment.py @@ -60,4 +60,4 @@ def test_sys_dir(): elif os.name == 'posix': assert sys_dir == r'/etc/nipy' else: - assert sys_dir == None + assert sys_dir is None diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py index a75484159e..da91aaf03a 100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -29,8 +29,8 @@ def test_files_spatialimages(): for klass in klasses: file_map = klass.make_file_map() for key, value in file_map.items(): - assert value.filename == None - assert value.fileobj == None + assert value.filename is None + assert value.fileobj is None assert value.pos == 0 # If we can't create new images in memory without loading, bail here if not klass.makeable: @@ -42,8 +42,8 @@ def test_files_spatialimages(): else: img = klass(arr, aff) for key, value in img.file_map.items(): - assert value.filename == None - assert value.fileobj == None + assert value.filename is None + assert value.fileobj is None assert value.pos == 0 diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index 924da5fc9f..07a2627910 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -243,9 +243,9 @@ def test_threshold_heuristic(): # Test for default skip / read heuristic # int assert threshold_heuristic(1, 9, 1, skip_thresh=8) == 'full' - assert threshold_heuristic(1, 9, 1, skip_thresh=7) == None + assert threshold_heuristic(1, 9, 1, skip_thresh=7) is None assert threshold_heuristic(1, 9, 2, skip_thresh=16) == 'full' - assert threshold_heuristic(1, 9, 2, skip_thresh=15) == None + assert threshold_heuristic(1, 9, 2, skip_thresh=15) is None # full slice, smallest step size assert (threshold_heuristic( slice(0, 9, 1), 9, 2, skip_thresh=2) == diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 
8dd64f8185..25c5b3e1de 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -274,7 +274,7 @@ def test_analyze_detection(): def wat(hdr): return nils.which_analyze_type(hdr.binaryblock) n1_hdr = Nifti1Header(b'\0' * 348, check=False) - assert wat(n1_hdr) == None + assert wat(n1_hdr) is None n1_hdr['sizeof_hdr'] = 540 assert wat(n1_hdr) == 'nifti2' assert wat(n1_hdr.as_byteswapped()) == 'nifti2' @@ -292,7 +292,7 @@ def wat(hdr): assert wat(n1_hdr) == 'analyze' n1_hdr['sizeof_hdr'] = 0 n1_hdr['magic'] = b'' - assert wat(n1_hdr) == None + assert wat(n1_hdr) is None n1_hdr['magic'] = 'n+1' assert wat(n1_hdr) == 'nifti1' n1_hdr['magic'] = 'ni1' diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index ef663f6e7b..8b6a56d965 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -1131,9 +1131,9 @@ def test_extension_basics(): def test_ext_eq(): ext = Nifti1Extension('comment', '123') assert ext == ext - assert not ext != ext + assert ext == ext ext2 = Nifti1Extension('comment', '124') - assert not ext == ext2 + assert ext != ext2 assert ext != ext2 @@ -1148,7 +1148,7 @@ def test_extension_list(): assert ext_c0 == ext_c1 ext = Nifti1Extension('comment', '123') ext_c1.append(ext) - assert not ext_c0 == ext_c1 + assert ext_c0 != ext_c1 ext_c0.append(ext) assert ext_c0 == ext_c1 @@ -1255,8 +1255,8 @@ def test_nifti_dicom_extension(): 'NiPy'.encode('utf-8')) dcmext = Nifti1DicomExtension(2, dcmbytes_explicit) assert dcmext.__class__ == Nifti1DicomExtension - assert dcmext._guess_implicit_VR() == False - assert dcmext._is_little_endian == True + assert dcmext._guess_implicit_VR() is False + assert dcmext._is_little_endian is True assert dcmext.get_code() == 2 assert dcmext.get_content().PatientID == 'NiPy' assert len(dcmext.get_content().values()) == 1 @@ -1267,7 +1267,7 @@ def test_nifti_dicom_extension(): dcmbytes_implicit = struct.pack('') # Big Endian Nifti1Header dcmext = Nifti1DicomExtension(2, dcmbytes_explicit_be, parent_hdr=hdr_be) assert dcmext.__class__ == Nifti1DicomExtension - assert dcmext._guess_implicit_VR() == False + assert dcmext._guess_implicit_VR() is False assert dcmext.get_code() == 2 assert dcmext.get_content().PatientID == 'NiPy' assert dcmext.get_content()[0x10, 0x20].value == 'NiPy' diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index eac73dd92b..d9d728046e 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -197,7 +197,7 @@ def file_opener(fileish, mode): assert os.path.exists('test.foo') # Check this doesn't add anything to parent - assert not '.foo' in Opener.compress_ext_map + assert '.foo' not in Opener.compress_ext_map def test_file_like_wrapper(): @@ -215,7 +215,7 @@ def test_file_like_wrapper(): fobj.close() assert fobj.closed # Added the fileobj name - assert fobj.name == None + assert fobj.name is None def test_compressionlevel(): diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 387cebecba..1e652a51c5 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -18,7 +18,7 @@ def assert_good(pkg_name, min_version=None): pkg, have_pkg, setup = optional_package(pkg_name, min_version=min_version) assert have_pkg assert sys.modules[pkg_name] == pkg - assert setup() == None + assert setup() is None def assert_bad(pkg_name, min_version=None): @@ -54,7 +54,7 @@ def raise_Exception(*args, **kwargs): def test_versions(): fake_name = '_a_fake_package' fake_pkg = 
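# Sketch of why `assert not ext != ext` was not redundant with
# `assert ext == ext`: __eq__ and __ne__ are looked up independently,
# so each operator deserves its own exercise. A later patch in this
# series restores the original form for exactly that reason.

class Weird:
    def __eq__(self, other):
        return True
    def __ne__(self, other):
        return True   # buggy: contradicts __eq__

x = Weird()
assert x == x          # __eq__ looks fine
assert x != x          # ... yet __ne__ disagrees; `not x != x` would fail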
types.ModuleType(fake_name) - assert not 'fake_pkg' in sys.modules + assert 'fake_pkg' not in sys.modules # Not inserted yet assert_bad(fake_name) try: diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 54c15fde6b..fe607c1982 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -182,7 +182,7 @@ def test_header(): si = np.array( [np.unique(x) for x in hdr.get_data_scaling()]).ravel() assert_almost_equal(si, (1.2903541326522827, 0.0), 5) - assert hdr.get_q_vectors() == None + assert hdr.get_q_vectors() is None assert hdr.get_bvals_bvecs() == (None, None) @@ -525,8 +525,8 @@ def test_diffusion_parameters_v4(): bvals, bvecs = dti_v4_hdr.get_bvals_bvecs() assert_almost_equal(bvals, DTI_PAR_BVALS) # no b-vector info in V4 .PAR files - assert bvecs == None - assert dti_v4_hdr.get_q_vectors() == None + assert bvecs is None + assert dti_v4_hdr.get_q_vectors() is None def test_null_diffusion_params(): @@ -538,7 +538,7 @@ def test_null_diffusion_params(): with suppress_warnings(): hdr = PARRECHeader(gen_info, slice_info, True) assert hdr.get_bvals_bvecs() == (None, None) - assert hdr.get_q_vectors() == None + assert hdr.get_q_vectors() is None def test_epi_params(): @@ -623,11 +623,11 @@ def test__get_uniqe_image_defs(): def test_copy_on_init(): # Test that input dict / array gets copied when making header hdr = PARRECHeader(HDR_INFO, HDR_DEFS) - assert not hdr.general_info is HDR_INFO + assert hdr.general_info is not HDR_INFO hdr.general_info['max_slices'] = 10 assert hdr.general_info['max_slices'] == 10 assert HDR_INFO['max_slices'] == 9 - assert not hdr.image_defs is HDR_DEFS + assert hdr.image_defs is not HDR_DEFS hdr.image_defs['image pixel size'] = 8 assert_array_equal(hdr.image_defs['image pixel size'], 8) assert_array_equal(HDR_DEFS['image pixel size'], 16) @@ -646,11 +646,11 @@ def test_header_copy(): hdr2 = hdr.copy() def assert_copy_ok(hdr1, hdr2): - assert not hdr1 is hdr2 + assert hdr1 is not hdr2 assert hdr1.permit_truncated == hdr2.permit_truncated - assert not hdr1.general_info is hdr2.general_info + assert hdr1.general_info is not hdr2.general_info assert_arr_dict_equal(hdr1.general_info, hdr2.general_info) - assert not hdr1.image_defs is hdr2.image_defs + assert hdr1.image_defs is not hdr2.image_defs assert_structarr_equal(hdr1.image_defs, hdr2.image_defs) assert_copy_ok(hdr, hdr2) @@ -866,8 +866,8 @@ def test_ADC_map(): # general_info indicates it is a diffusion scan, but because it is # a post-processed image, the bvals and bvecs aren't available bvals, bvecs = adc_hdr.get_bvals_bvecs() - assert bvals == None - assert bvecs == None + assert bvals is None + assert bvecs is None def test_alternative_header_field_names(): diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 873059b510..19dd5f6fd0 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -335,8 +335,7 @@ def test_parrec2nii_with_data(): # The data is very close, unless it's the fieldmap if par_root != 'fieldmap': conved_data_lps = flip_axis(conved_img.dataobj, 1) - assert np.allclose(conved_data_lps, - philips_img.dataobj) + assert np.allclose(conved_data_lps, philips_img.dataobj) with InTemporaryDirectory(): # Test some options dti_par = pjoin(BALLS, 'PARREC', 'DTI.PAR') diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index 3de2af99dd..e93c358756 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -67,7 +67,7 @@ def 
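# Sketch of the optional_package contract that assert_good() checks:
# it returns the module (or a tripwire placeholder), a have-it flag,
# and a setup function whose successful run returns None.
from nibabel.optpkg import optional_package

scipy, have_scipy, setup_module = optional_package('scipy')
if have_scipy:
    assert setup_module() is None   # no-op when the package is present
# When the package is missing, touching the placeholder raises
# nibabel.tripwire.TripWireError rather than a bare AttributeError.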
test_from_header(): hdr = Header(np.float64, shape=(1, 2, 3), zooms=(3.0, 2.0, 1.0)) copy = Header.from_header(hdr) assert hdr == copy - assert not hdr is copy + assert hdr is not copy class C(object): @@ -233,7 +233,7 @@ def test_images(self): arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) img = self.image_class(arr, None) assert (img.get_fdata() == arr).all() - assert img.affine == None + assert img.affine is None def test_default_header(self): # Check default header is as expected @@ -348,24 +348,24 @@ def test_get_fdata(self): out_data[:] = 42 assert img.get_fdata() is out_data img.uncache() - assert not img.get_fdata() is out_data + assert img.get_fdata() is not out_data # The 42 has gone now. assert (img.get_fdata() == in_data_template).all() # If we can save, we can create a proxy image if not self.can_save: return rt_img = bytesio_round_trip(img) - assert not in_data is rt_img.dataobj + assert in_data is not rt_img.dataobj assert (rt_img.dataobj == in_data).all() out_data = rt_img.get_fdata() assert (out_data == in_data).all() - assert not rt_img.dataobj is out_data + assert rt_img.dataobj is not out_data assert out_data.dtype == np.dtype(np.float64) # cache assert rt_img.get_fdata() is out_data out_data[:] = 42 rt_img.uncache() - assert not rt_img.get_fdata() is out_data + assert rt_img.get_fdata() is not out_data assert (rt_img.get_fdata() == in_data).all() def test_get_data(self): @@ -395,19 +395,19 @@ def test_get_data(self): if not self.can_save: return rt_img = bytesio_round_trip(img) - assert not in_data is rt_img.dataobj + assert in_data is not rt_img.dataobj assert (rt_img.dataobj == in_data).all() with pytest.deprecated_call(): out_data = rt_img.get_data() assert (out_data == in_data).all() - assert not rt_img.dataobj is out_data + assert rt_img.dataobj is not out_data # cache with pytest.deprecated_call(): assert rt_img.get_data() is out_data out_data[:] = 42 rt_img.uncache() with pytest.deprecated_call(): - assert not rt_img.get_data() is out_data + assert rt_img.get_data() is not out_data with pytest.deprecated_call(): assert (rt_img.get_data() == in_data).all() diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 331238db5c..5d7cb5ce73 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -151,9 +151,7 @@ def test_origin_checks(self): pytest.raises(*raiser) # diagnose binary block dxer = self.header_class.diagnose_binaryblock - assert (dxer(hdr.binaryblock) == - 'very large origin values ' - 'relative to dims') + assert dxer(hdr.binaryblock) == 'very large origin values relative to dims' class ImageScalingMixin(object): diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index bcfbfe673d..ad4eb083a2 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -291,8 +291,8 @@ def f(pts): # from vx to mm def test__check_hdr_points_space(): # Test checking routine for points_space input given header # None or voxmm -> no checks, pass through - assert tv._check_hdr_points_space({}, None) == None - assert tv._check_hdr_points_space({}, 'voxmm') == None + assert tv._check_hdr_points_space({}, None) is None + assert tv._check_hdr_points_space({}, 'voxmm') is None # strange value for points_space -> ValueError with pytest.raises(ValueError): tv._check_hdr_points_space({}, 'crazy') @@ -313,7 +313,7 @@ def test__check_hdr_points_space(): tv._check_hdr_points_space(hdr, 'voxel') # This is OK hdr['voxel_size'] = [2, 3, 4] - assert 
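# Sketch of the caching behaviour the test_get_fdata hunks pin down:
# get_fdata() hands back one cached float64 array until uncache() is
# called, after which a fresh copy is computed.
import numpy as np

import nibabel as nib

img = nib.Nifti1Image(np.arange(24, dtype=np.int16).reshape(2, 3, 4),
                      np.eye(4))
data = img.get_fdata()
assert data.dtype == np.float64
assert img.get_fdata() is data        # same object while cached
img.uncache()
assert img.get_fdata() is not data    # cache dropped, fresh array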
tv._check_hdr_points_space(hdr, 'voxel') == None + assert tv._check_hdr_points_space(hdr, 'voxel') is None # rasmm - check there is an affine, that it matches voxel_size and # voxel_order # no affine @@ -340,8 +340,7 @@ def test__check_hdr_points_space(): # This should be OK good_aff = np.diag([2, 3, 4, 1]) hdr['vox_to_ras'] = good_aff - assert (tv._check_hdr_points_space(hdr, 'rasmm') == - None) + assert tv._check_hdr_points_space(hdr, 'rasmm') is None # Default voxel order of LPS assumed hdr['voxel_order'] = '' # now the RAS affine raises an error @@ -350,8 +349,7 @@ def test__check_hdr_points_space(): # this affine does have LPS voxel order good_lps = np.dot(np.diag([-1, -1, 1, 1]), good_aff) hdr['vox_to_ras'] = good_lps - assert (tv._check_hdr_points_space(hdr, 'rasmm') == - None) + assert tv._check_hdr_points_space(hdr, 'rasmm') is None def test_empty_header(): @@ -461,7 +459,7 @@ def test_aff_to_hdr(): for hdr in ({}, {'version': 2}, {'version': 1}): tv.aff_to_hdr(aff2, hdr, pos_vox=True, set_order=False) assert_array_equal(hdr['voxel_size'], [1, 2, 3]) - assert not 'voxel_order' in hdr + assert 'voxel_order' not in hdr tv.aff_to_hdr(aff2, hdr, pos_vox=False, set_order=True) assert_array_equal(hdr['voxel_size'], [-1, 2, 3]) assert hdr['voxel_order'] == 'RAI' @@ -469,7 +467,7 @@ def test_aff_to_hdr(): assert_array_equal(hdr['voxel_size'], [1, 2, 3]) assert hdr['voxel_order'] == 'RAI' if 'version' in hdr and hdr['version'] == 1: - assert not 'vox_to_ras' in hdr + assert 'vox_to_ras' not in hdr else: assert_array_equal(hdr['vox_to_ras'], aff2) @@ -479,7 +477,7 @@ def test_tv_class(): assert tvf.streamlines == [] assert isinstance(tvf.header, np.ndarray) assert tvf.endianness == tv.native_code - assert tvf.filename == None + assert tvf.filename is None out_f = BytesIO() tvf.to_file(out_f) assert out_f.getvalue() == tv.empty_header().tostring() diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index f052098475..32035db995 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -175,7 +175,7 @@ def test_mappingness(self): for key, val in hdr.items(): assert_array_equal(hdr[key], val) # verify that .get operates as destined - assert hdr.get('nonexistent key') == None + assert hdr.get('nonexistent key') is None assert hdr.get('nonexistent key', 'default') == 'default' assert hdr.get(keys[0]) == vals[0] assert hdr.get(keys[0], 'default') == vals[0] From e3f171bc8c557de3815927af5bbf1d9fce2d7e25 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 6 Feb 2020 11:05:22 -0500 Subject: [PATCH 630/689] MNT: CI YAML formatting, extraneous .coveragerc --- .azure-pipelines/windows.yml | 2 +- .gitignore | 1 + .travis.yml | 2 +- for_testing/.coveragerc | 9 --------- 4 files changed, 3 insertions(+), 11 deletions(-) delete mode 100644 for_testing/.coveragerc diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index c7411a77c7..a73cb7b6de 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -100,7 +100,7 @@ jobs: -I test_rstutils ^ -I test_scaling ^ -I test_wrapstruct ^ - -I test_io + -I test_io displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | diff --git a/.gitignore b/.gitignore index fd686f2781..e876975c27 100644 --- a/.gitignore +++ b/.gitignore @@ -85,3 +85,4 @@ doc/source/reference venv/ .buildbot.patch .vscode +for_testing/ diff --git a/.travis.yml b/.travis.yml index 483efe29ec..f13b53f5a1 100644 --- a/.travis.yml +++ 
b/.travis.yml @@ -187,7 +187,7 @@ script: -I test_rstutils \ -I test_scaling \ -I test_wrapstruct \ - -I test_io + -I test_io elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing diff --git a/for_testing/.coveragerc b/for_testing/.coveragerc deleted file mode 100644 index 57747ec0d8..0000000000 --- a/for_testing/.coveragerc +++ /dev/null @@ -1,9 +0,0 @@ -[run] -branch = True -source = nibabel, nisext -include = */nibabel/*, */nisext/* -omit = - */externals/* - */benchmarks/* - */tests/* - nibabel/_version.py From eaab253772078df4e62145c970b54aa965c081b2 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 6 Feb 2020 11:06:16 -0500 Subject: [PATCH 631/689] TEST: Style and formatting --- nibabel/freesurfer/tests/test_io.py | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index b457a6c00d..521923057f 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -92,16 +92,12 @@ def test_geometry(): warnings.filterwarnings('always', category=DeprecationWarning) read_geometry(surf_path, read_metadata=True) - assert( - any('volume information contained' in str(ww.message) - for ww in w)) - assert( - any('extension code' in str(ww.message) for ww in w)) + assert any('volume information contained' in str(ww.message) for ww in w) + assert any('extension code' in str(ww.message) for ww in w) volume_info['head'] = [1, 2] with clear_and_catch_warnings() as w: write_geometry(surf_path, coords, faces, create_stamp, volume_info) - assert( - any('Unknown extension' in str(ww.message) for ww in w)) + assert any('Unknown extension' in str(ww.message) for ww in w) volume_info['a'] = 0 with pytest.raises(ValueError): write_geometry(surf_path, coords, faces, create_stamp, volume_info) @@ -146,7 +142,7 @@ def test_morph_data(): new_path = 'test' write_morph_data(new_path, curv) curv2 = read_morph_data(new_path) - assert np.array_equal (curv2, curv) + assert np.array_equal(curv2, curv) def test_write_morph_data(): @@ -163,7 +159,7 @@ def test_write_morph_data(): with pytest.raises(ValueError): write_morph_data('test.curv', np.zeros(shape), big_num) - # Windows 32-bit overflows Python int + # Windows 32-bit overflows Python int if np.dtype(np.int) != np.dtype(np.int32): with pytest.raises(ValueError): write_morph_data('test.curv', strided_scalar((big_num,))) @@ -272,12 +268,10 @@ def test_write_annot_fill_ctab(): # values back. 
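# Rough stdlib equivalent of the warning-scanning pattern in these
# freesurfer test hunks; nibabel's clear_and_catch_warnings behaves
# like warnings.catch_warnings but also resets the __warningregistry__
# of the listed modules so repeated warnings are not swallowed.
import warnings

with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter('always')
    warnings.warn('volume information changed', UserWarning)
assert any('volume information' in str(ww.message) for ww in w)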
badannot = (10 * np.arange(nlabels, dtype=np.int32)).reshape(-1, 1) rgbal = np.hstack((rgba, badannot)) - print(labels) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert ( - any('Annotation values in {} will be incorrect'.format( - annot_path) == str(ww.message) for ww in w)) + assert any('Annotation values in {} will be incorrect'.format(annot_path) == str(ww.message) + for ww in w) labels2, rgbal2, names2 = read_annot(annot_path, orig_ids=True) names2 = [n.decode('ascii') for n in names2] assert np.all(np.isclose(rgbal2[:, :4], rgba)) @@ -291,9 +285,8 @@ def test_write_annot_fill_ctab(): rgbal[:, 2] * (2 ** 16)) with clear_and_catch_warnings() as w: write_annot(annot_path, labels, rgbal, names, fill_ctab=False) - assert( - not any('Annotation values in {} will be incorrect'.format( - annot_path) == str(ww.message) for ww in w)) + assert all('Annotation values in {} will be incorrect'.format(annot_path) != str(ww.message) + for ww in w) labels2, rgbal2, names2 = read_annot(annot_path) names2 = [n.decode('ascii') for n in names2] assert np.all(np.isclose(rgbal2[:, :4], rgba)) @@ -356,7 +349,7 @@ def test_label(): assert label.shape[0] <= 163842 labels, scalars = read_label(label_path, True) - assert (np.all(labels == label)) + assert np.all(labels == label) assert len(labels) == len(scalars) From 51f248532abb83d4c5010867d78da3d84fdaa3db Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 6 Feb 2020 11:15:02 -0500 Subject: [PATCH 632/689] CI: Restore import-only test --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index b869d33947..297604e49d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,6 +36,11 @@ jobs: - python: 3.5 env: - DEPENDS="-r requirements.txt" + # Clean install + - python: 3.5 + env: + - DEPENDS="" + - CHECK_TYPE=skiptests # Absolute minimum dependencies - python: 3.5 env: @@ -105,6 +110,7 @@ install: fi # Basic import check - python -c 'import nibabel; print(nibabel.__version__)' + - if [ "$CHECK_TYPE" == "skiptests" ]; then exit 0; fi before_script: # Point to nibabel data directory From a9bbe82c12ef4a514978c65e82d3d062af6bd078 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Thu, 6 Feb 2020 11:41:58 -0500 Subject: [PATCH 633/689] small cleaning after review --- nibabel/tests/test_arraywriters.py | 4 ++-- nibabel/tests/test_casting.py | 2 +- nibabel/tests/test_deprecator.py | 36 +++++++++++++---------------- nibabel/tests/test_ecat.py | 5 ++-- nibabel/tests/test_nifti1.py | 6 ++--- nibabel/tests/test_spatialimages.py | 15 +++++++----- nibabel/tests/test_spm99analyze.py | 4 ++-- nibabel/tests/test_volumeutils.py | 6 ++--- 8 files changed, 37 insertions(+), 41 deletions(-) diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 14e9025c21..1a1c4eb156 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -640,7 +640,7 @@ def test_writer_maker(): assert (aw.slope, aw.inter) == (1, 0) aw.calc_scale() slope, inter = aw.slope, aw.inter - assert (slope, inter) != (1, 0) + assert not (slope, inter) == (1, 0) # Should run by default aw = make_array_writer(arr, np.int16) assert (aw.slope, aw.inter) == (slope, inter) @@ -704,7 +704,7 @@ def test_int_int_slope(): aw = SlopeArrayWriter(arr, out_dt) except ScalingError: continue - assert aw.slope != 0 + assert not aw.slope == 0 arr_back_sc = round_trip(aw) # integer allclose adiff = int_abs(arr - arr_back_sc) diff --git 
a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index 9006ce321c..c4f9b9ba9e 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -120,7 +120,7 @@ def test_casting(): # Confirm input array is not modified nans = np.isnan(farr) assert_array_equal(nans, np.isnan(farr_orig)) - assert_array_equal(farr[nans is False], farr_orig[nans is False]) + assert_array_equal(farr[nans == False], farr_orig[nans == False]) # Test scalars work and return scalars assert_array_equal(float_to_int(np.float32(0), np.int16), [0]) # Test scalar nan OK diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index 63d2b32a70..9c15a6566e 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -34,9 +34,9 @@ def test__add_dep_doc(): assert _add_dep_doc('bar\n\n', 'foo') == 'bar\n\nfoo\n' assert _add_dep_doc('bar\n \n', 'foo') == 'bar\n\nfoo\n' assert (_add_dep_doc(' bar\n\nSome explanation', 'foo\nbaz') == - ' bar\n\nfoo\nbaz\n\nSome explanation\n') + ' bar\n\nfoo\nbaz\n\nSome explanation\n') assert (_add_dep_doc(' bar\n\n Some explanation', 'foo\nbaz') == - ' bar\n \n foo\n baz\n \n Some explanation\n') + ' bar\n \n foo\n baz\n \n Some explanation\n') class CustomError(Exception): @@ -73,14 +73,12 @@ def test_dep_func(self): assert func() is None assert func.__doc__ == 'foo\n' func = dec('foo')(func_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call() as w: assert func(1) is None assert len(w) == 1 assert func.__doc__ == 'A docstring\n\nfoo\n' func = dec('foo')(func_doc_long) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call() as w: assert func(1, 2) is None assert len(w) == 1 assert func.__doc__ == 'A docstring\n \n foo\n \n Some text\n' @@ -88,13 +86,11 @@ def test_dep_func(self): # Try some since and until versions func = dec('foo', '1.1')(func_no_doc) assert func.__doc__ == 'foo\n\n* deprecated from version: 1.1\n' - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call() as w: assert func() is None assert len(w) == 1 func = dec('foo', until='99.4')(func_no_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.deprecated_call() as w: assert func() is None assert len(w) == 1 assert (func.__doc__ == @@ -104,22 +100,22 @@ def test_dep_func(self): with pytest.raises(ExpiredDeprecationError): func() assert (func.__doc__ == - 'foo\n\n* Raises {} as of version: 1.8\n' - .format(ExpiredDeprecationError)) + 'foo\n\n* Raises {} as of version: 1.8\n' + .format(ExpiredDeprecationError)) func = dec('foo', '1.2', '1.8')(func_no_doc) with pytest.raises(ExpiredDeprecationError): func() assert (func.__doc__ == - 'foo\n\n* deprecated from version: 1.2\n' - '* Raises {} as of version: 1.8\n' - .format(ExpiredDeprecationError)) + 'foo\n\n* deprecated from version: 1.2\n' + '* Raises {} as of version: 1.8\n' + .format(ExpiredDeprecationError)) func = dec('foo', '1.2', '1.8')(func_doc_long) assert (func.__doc__ == - 'A docstring\n \n foo\n \n' - ' * deprecated from version: 1.2\n' - ' * Raises {} as of version: 1.8\n \n' - ' Some text\n' - .format(ExpiredDeprecationError)) + 'A docstring\n \n foo\n \n' + ' * deprecated from version: 1.2\n' + ' * Raises {} as of version: 1.8\n \n' + ' Some text\n' + .format(ExpiredDeprecationError)) with 
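# Sketch of why the test_casting hunk in this patch goes back to an
# elementwise mask: `nans` is a boolean array, so `nans is False` is
# plain identity against the False singleton, always evaluates False,
# and selects a zero-length slice on both sides, letting the original
# assertion pass vacuously.
import numpy as np

farr = np.array([1.0, np.nan, 3.0])
nans = np.isnan(farr)
assert (nans is False) is False          # identity, never elementwise
assert farr[nans == False].size == 2     # mask keeps the finite values
assert farr[~nans].size == 2             # the idiomatic spelling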
pytest.raises(ExpiredDeprecationError): func() diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 0f216091f2..918cffc52b 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -21,7 +21,7 @@ from numpy.testing import assert_array_equal, assert_array_almost_equal -from ..testing_pytest import data_path, suppress_warnings, clear_and_catch_warnings +from ..testing_pytest import data_path, suppress_warnings from ..tmpdirs import InTemporaryDirectory from .test_wrapstruct import _TestWrapStructBase @@ -271,8 +271,7 @@ def test_mlist_regression(self): def test_from_filespec_deprecation(): # Check from_filespec raises Deprecation - with clear_and_catch_warnings() as w: - warnings.simplefilter('always', DeprecationWarning) + with pytest.deprecated_call() as w: # No warning for standard load img_loaded = EcatImage.load(ecat_file) assert len(w) == 0 diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 8b6a56d965..d5dff4a4e4 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -1131,10 +1131,10 @@ def test_extension_basics(): def test_ext_eq(): ext = Nifti1Extension('comment', '123') assert ext == ext - assert ext == ext + assert not ext != ext ext2 = Nifti1Extension('comment', '124') assert ext != ext2 - assert ext != ext2 + assert not ext == ext2 def test_extension_codes(): @@ -1148,7 +1148,7 @@ def test_extension_list(): assert ext_c0 == ext_c1 ext = Nifti1Extension('comment', '123') ext_c1.append(ext) - assert ext_c0 != ext_c1 + assert not ext_c0 == ext_c1 ext_c0.append(ext) assert ext_c0 == ext_c1 diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index e93c358756..82da89b3d7 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -379,10 +379,10 @@ def test_get_data(self): img[0, 0, 0] # Make sure the right message gets raised: assert (str(exception_manager.value) == - "Cannot slice image objects; consider using " - "`img.slicer[slice]` to generate a sliced image (see " - "documentation for caveats) or slicing image array data " - "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") + "Cannot slice image objects; consider using " + "`img.slicer[slice]` to generate a sliced image (see " + "documentation for caveats) or slicing image array data " + "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") assert in_data is img.dataobj with pytest.deprecated_call(): out_data = img.get_data() @@ -648,7 +648,10 @@ def test_load_mmap(self): def test_header_deprecated(): - with pytest.deprecated_call(): + with pytest.deprecated_call() as w: class MyHeader(Header): pass - MyHeader() \ No newline at end of file + + assert len(w) == 0 + MyHeader() + assert len(w) == 1 \ No newline at end of file diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 5d7cb5ce73..977fef3071 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -146,8 +146,8 @@ def test_origin_checks(self): fhdr, message, raiser = self.log_chk(hdr, 20) assert fhdr == hdr assert (message == 'very large origin values ' - 'relative to dims; leaving as set, ' - 'ignoring for affine') + 'relative to dims; leaving as set, ' + 'ignoring for affine') pytest.raises(*raiser) # diagnose binary block dxer = self.header_class.diagnose_binaryblock diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 1591dae005..dca5053d74 100644 --- a/nibabel/tests/test_volumeutils.py +++ 
b/nibabel/tests/test_volumeutils.py @@ -57,8 +57,7 @@ assert_array_equal) import pytest -from ..testing_pytest import (assert_dt_equal, assert_allclose_safely, - suppress_warnings, clear_and_catch_warnings) +from ..testing_pytest import assert_dt_equal, assert_allclose_safely, suppress_warnings #: convenience variables for numpy types FLOAT_TYPES = np.sctypes['float'] @@ -1019,8 +1018,7 @@ def test_fname_ext_ul_case(): def test_allopen(): # This import into volumeutils is for compatibility. The code is the # ``openers`` module. - with clear_and_catch_warnings() as w: - warnings.filterwarnings('once', category=DeprecationWarning) + with pytest.deprecated_call() as w: # Test default mode is 'rb' fobj = allopen(__file__) # Check we got the deprecation warning From 19b55b9319d6f0366a26323c796f64950b053cb2 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Thu, 6 Feb 2020 11:46:44 -0500 Subject: [PATCH 634/689] small edit --- nibabel/tests/test_deprecator.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index 9c15a6566e..c508795cf9 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -144,11 +144,10 @@ class TestDeprecatorMaker(object): def test_deprecator_maker(self): dec = self.dep_maker(warn_class=UserWarning) func = dec('foo')(func_no_doc) - with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w: - warnings.simplefilter('always') + with pytest.warns(UserWarning) as w: + # warnings.simplefilter('always') assert func() is None assert len(w) == 1 - assert w[0].category is UserWarning dec = self.dep_maker(error_class=CustomError) func = dec('foo')(func_no_doc) From 6838af6273f4a01e5318ec9fd880db1ceee9988b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Thu, 6 Feb 2020 11:49:47 -0500 Subject: [PATCH 635/689] Update nibabel/tests/test_image_api.py Co-Authored-By: Chris Markiewicz --- nibabel/tests/test_image_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 34725ba186..f5aeb53822 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -115,7 +115,7 @@ def validate_header_deprecated(self, imaker, params): img = imaker() with pytest.deprecated_call(): hdr = img.get_header() - assert hdr is img.header + assert hdr is img.header def validate_filenames(self, imaker, params): # Validate the filename, file_map interface From 5064f9a40211388c9913cd4c8cc4f7edd3c2a635 Mon Sep 17 00:00:00 2001 From: Or Duek Date: Thu, 6 Feb 2020 17:14:24 -0500 Subject: [PATCH 636/689] Update nibabel/freesurfer/tests/test_mghformat.py Co-Authored-By: Chris Markiewicz --- nibabel/freesurfer/tests/test_mghformat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index d70c4e9742..e3090ca5fa 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -217,7 +217,7 @@ def test_header_updating(): assert_almost_equal(mgz.affine, exp_aff, 6) assert_almost_equal(hdr.get_affine(), exp_aff, 6) # Test that initial wonky header elements have not changed - assert np.array_equal(hdr['delta'], 1) + assert np.all(hdr['delta'] == 1) assert_almost_equal(hdr['Mdc'].T, exp_aff[:3, :3]) # Save, reload, same thing img_fobj = io.BytesIO() From d9c385a2ef5befbd58778f0a784d319cd09cb05b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: 
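# Sketch of the distinction behind the test_mghformat change above:
# np.array_equal demands matching shapes, while a broadcast comparison
# reduced with np.all does not.
import numpy as np

delta = np.ones(3)
assert not np.array_equal(delta, 1)   # shapes (3,) and () differ
assert np.all(delta == 1)             # comparison broadcasts, then reduces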
Thu, 6 Feb 2020 20:27:30 -0500 Subject: [PATCH 637/689] CI: Enable doctests --- .azure-pipelines/windows.yml | 2 +- .travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 849d36745d..8de4ed3466 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -114,7 +114,7 @@ jobs: mkdir for_testing cd for_testing cp ../.coveragerc . - pytest --cov nibabel -v --pyargs nibabel + pytest --doctest-modules --cov nibabel -v --pyargs nibabel displayName: 'Pytest tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'test')) - script: | diff --git a/.travis.yml b/.travis.yml index 67131f21c8..711601764b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -206,7 +206,7 @@ script: mkdir for_testing cd for_testing cp ../.coveragerc . - pytest --cov nibabel -v --pyargs nibabel + pytest --doctest-modules --cov nibabel -v --pyargs nibabel else false fi From 0a4944976e7a16988026644ac9db4721e6111efb Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 6 Feb 2020 20:37:06 -0500 Subject: [PATCH 638/689] TEST: Use numpy 1.13 legacy print settings --- nibabel/conftest.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 nibabel/conftest.py diff --git a/nibabel/conftest.py b/nibabel/conftest.py new file mode 100644 index 0000000000..243ea957e2 --- /dev/null +++ b/nibabel/conftest.py @@ -0,0 +1,15 @@ +import pytest + + +@pytest.fixture(autouse=True, scope="session") +def set_printopts(): + import numpy as np + from distutils.version import LooseVersion + + if LooseVersion(np.__version__) >= LooseVersion("1.14"): + legacy_printopt = np.get_printoptions().get("legacy") + np.set_printoptions(legacy="1.13") + yield + np.set_printoptions(legacy=legacy_printopt) + else: + yield From 40d24719afe76d1ba8ffd6bf454b6493a6bb71c0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 7 Feb 2020 17:46:54 -0500 Subject: [PATCH 639/689] TEST: Convert nibabel.nicom to unittest/pytest --- nibabel/nicom/tests/__init__.py | 5 +- nibabel/nicom/tests/test_csareader.py | 79 +++---- nibabel/nicom/tests/test_dicomreaders.py | 44 ++-- nibabel/nicom/tests/test_dicomwrappers.py | 270 ++++++++++++---------- nibabel/nicom/tests/test_structreader.py | 31 ++- nibabel/nicom/tests/test_utils.py | 52 ++--- nibabel/pydicom_compat.py | 15 +- nibabel/tests/test_nifti1.py | 3 +- 8 files changed, 256 insertions(+), 243 deletions(-) diff --git a/nibabel/nicom/tests/__init__.py b/nibabel/nicom/tests/__init__.py index c7c3753010..127ad5a6e0 100644 --- a/nibabel/nicom/tests/__init__.py +++ b/nibabel/nicom/tests/__init__.py @@ -1 +1,4 @@ -# init to allow relative imports in tests +from ...pydicom_compat import have_dicom +import unittest + +dicom_test = unittest.skipUnless(have_dicom, "Could not import dicom or pydicom") diff --git a/nibabel/nicom/tests/test_csareader.py b/nibabel/nicom/tests/test_csareader.py index a6bf589e90..1692aad622 100644 --- a/nibabel/nicom/tests/test_csareader.py +++ b/nibabel/nicom/tests/test_csareader.py @@ -7,15 +7,13 @@ import numpy as np +from ...pydicom_compat import pydicom from .. import csareader as csa from .. import dwiparams as dwp -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) - -from ...testing import skipif - -from nibabel.pydicom_compat import dicom_test, pydicom -from .test_dicomwrappers import (IO_DATA_PATH, DATA) +import pytest +from . 
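# Sketch of how the dicom_test decorator defined above behaves when it
# wraps a plain (non-TestCase) test function: with pydicom absent, the
# wrapped function raises unittest.SkipTest when called, which pytest
# reports as a skip rather than a failure.
import unittest

have_dicom = False   # pretend pydicom failed to import
dicom_test = unittest.skipUnless(have_dicom, 'Could not import dicom or pydicom')

@dicom_test
def test_needs_pydicom():
    raise AssertionError('never reached when skipped')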
import dicom_test +from .test_dicomwrappers import IO_DATA_PATH, DATA CSA2_B0 = open(pjoin(IO_DATA_PATH, 'csa2_b0.bin'), 'rb').read() CSA2_B1000 = open(pjoin(IO_DATA_PATH, 'csa2_b1000.bin'), 'rb').read() @@ -27,59 +25,61 @@ @dicom_test def test_csa_header_read(): hdr = csa.get_csa_header(DATA, 'image') - assert_equal(hdr['n_tags'], 83) - assert_equal(csa.get_csa_header(DATA, 'series')['n_tags'], 65) - assert_raises(ValueError, csa.get_csa_header, DATA, 'xxxx') - assert_true(csa.is_mosaic(hdr)) + assert hdr['n_tags'] == 83 + assert csa.get_csa_header(DATA, 'series')['n_tags'] == 65 + with pytest.raises(ValueError): + csa.get_csa_header(DATA, 'xxxx') + assert csa.is_mosaic(hdr) # Get a shallow copy of the data, lacking the CSA marker # Need to do it this way because del appears broken in pydicom 0.9.7 data2 = pydicom.dataset.Dataset() for element in DATA: if (element.tag.group, element.tag.elem) != (0x29, 0x10): data2.add(element) - assert_equal(csa.get_csa_header(data2, 'image'), None) + assert csa.get_csa_header(data2, 'image') is None # Add back the marker - CSA works again data2[(0x29, 0x10)] = DATA[(0x29, 0x10)] - assert_true(csa.is_mosaic(csa.get_csa_header(data2, 'image'))) + assert csa.is_mosaic(csa.get_csa_header(data2, 'image')) def test_csas0(): for csa_str in (CSA2_B0, CSA2_B1000): csa_info = csa.read(csa_str) - assert_equal(csa_info['type'], 2) - assert_equal(csa_info['n_tags'], 83) + assert csa_info['type'] == 2 + assert csa_info['n_tags'] == 83 tags = csa_info['tags'] - assert_equal(len(tags), 83) + assert len(tags) == 83 n_o_m = tags['NumberOfImagesInMosaic'] - assert_equal(n_o_m['items'], [48]) + assert n_o_m['items'] == [48] csa_info = csa.read(CSA2_B1000) b_matrix = csa_info['tags']['B_matrix'] - assert_equal(len(b_matrix['items']), 6) + assert len(b_matrix['items']) == 6 b_value = csa_info['tags']['B_value'] - assert_equal(b_value['items'], [1000]) + assert b_value['items'] == [1000] def test_csa_len0(): # We did get a failure for item with item_len of 0 - gh issue #92 csa_info = csa.read(CSA2_0len) - assert_equal(csa_info['type'], 2) - assert_equal(csa_info['n_tags'], 44) + assert csa_info['type'] == 2 + assert csa_info['n_tags'] == 44 tags = csa_info['tags'] - assert_equal(len(tags), 44) + assert len(tags) == 44 def test_csa_nitem(): # testing csa.read's ability to raise an error when n_items >= 200 - assert_raises(csa.CSAReadError, csa.read, CSA_STR_1001n_items) + with pytest.raises(csa.CSAReadError): + csa.read(CSA_STR_1001n_items) # OK when < 1000 csa_info = csa.read(CSA_STR_valid) - assert_equal(len(csa_info['tags']), 1) + assert len(csa_info['tags']) == 1 # OK after changing module global n_items_thresh = csa.MAX_CSA_ITEMS try: csa.MAX_CSA_ITEMS = 2000 csa_info = csa.read(CSA_STR_1001n_items) - assert_equal(len(csa_info['tags']), 1) + assert len(csa_info['tags']) == 1 finally: csa.MAX_CSA_ITEMS = n_items_thresh @@ -88,32 +88,30 @@ def test_csa_params(): for csa_str in (CSA2_B0, CSA2_B1000): csa_info = csa.read(csa_str) n_o_m = csa.get_n_mosaic(csa_info) - assert_equal(n_o_m, 48) + assert n_o_m == 48 snv = csa.get_slice_normal(csa_info) - assert_equal(snv.shape, (3,)) - assert_true(np.allclose(1, - np.sqrt((snv * snv).sum()))) + assert snv.shape == (3,) + assert np.allclose(1, np.sqrt((snv * snv).sum())) amt = csa.get_acq_mat_txt(csa_info) - assert_equal(amt, '128p*128') + assert amt == '128p*128' csa_info = csa.read(CSA2_B0) b_matrix = csa.get_b_matrix(csa_info) - assert_equal(b_matrix, None) + assert b_matrix is None b_value = csa.get_b_value(csa_info) - 
assert_equal(b_value, 0) + assert b_value == 0 g_vector = csa.get_g_vector(csa_info) - assert_equal(g_vector, None) + assert g_vector is None csa_info = csa.read(CSA2_B1000) b_matrix = csa.get_b_matrix(csa_info) - assert_equal(b_matrix.shape, (3, 3)) + assert b_matrix.shape == (3, 3) # check (by absence of error) that the B matrix is positive # semi-definite. dwp.B2q(b_matrix) # no error b_value = csa.get_b_value(csa_info) - assert_equal(b_value, 1000) + assert b_value == 1000 g_vector = csa.get_g_vector(csa_info) - assert_equal(g_vector.shape, (3,)) - assert_true( - np.allclose(1, np.sqrt((g_vector * g_vector).sum()))) + assert g_vector.shape == (3,) + assert np.allclose(1, np.sqrt((g_vector * g_vector).sum())) def test_ice_dims(): @@ -124,9 +122,8 @@ def test_ice_dims(): for csa_str, ex_dims in ((CSA2_B0, ex_dims0), (CSA2_B1000, ex_dims1)): csa_info = csa.read(csa_str) - assert_equal(csa.get_ice_dims(csa_info), - ex_dims) - assert_equal(csa.get_ice_dims({}), None) + assert csa.get_ice_dims(csa_info) == ex_dims + assert csa.get_ice_dims({}) is None @dicom_test @@ -138,4 +135,4 @@ def test_missing_csa_elem(): csa_tag = pydicom.dataset.Tag(0x29, 0x1010) del dcm[csa_tag] hdr = csa.get_csa_header(dcm, 'image') - assert_equal(hdr, None) + assert hdr is None diff --git a/nibabel/nicom/tests/test_dicomreaders.py b/nibabel/nicom/tests/test_dicomreaders.py index cb03aae74b..167cb26de6 100644 --- a/nibabel/nicom/tests/test_dicomreaders.py +++ b/nibabel/nicom/tests/test_dicomreaders.py @@ -2,20 +2,17 @@ """ -from os.path import join as pjoin, abspath +from os.path import join as pjoin import numpy as np from .. import dicomreaders as didr +from ...pydicom_compat import pydicom -from nibabel.pydicom_compat import dicom_test, pydicom +import pytest +from . import dicom_test -from .test_dicomwrappers import (EXPECTED_AFFINE, - EXPECTED_PARAMS, - IO_DATA_PATH, - DATA) - -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +from .test_dicomwrappers import EXPECTED_AFFINE, EXPECTED_PARAMS, IO_DATA_PATH, DATA from numpy.testing import assert_array_equal, assert_array_almost_equal @@ -24,7 +21,7 @@ def test_read_dwi(): img = didr.mosaic_to_nii(DATA) arr = img.get_data() - assert_equal(arr.shape, (128, 128, 48)) + assert arr.shape == (128, 128, 48) assert_array_almost_equal(img.affine, EXPECTED_AFFINE) @@ -32,11 +29,12 @@ def test_read_dwi(): def test_read_dwis(): data, aff, bs, gs = didr.read_mosaic_dwi_dir(IO_DATA_PATH, 'siemens_dwi_*.dcm.gz') - assert_equal(data.ndim, 4) + assert data.ndim == 4 assert_array_almost_equal(aff, EXPECTED_AFFINE) assert_array_almost_equal(bs, (0, EXPECTED_PARAMS[0])) assert_array_almost_equal(gs, (np.zeros((3,)), EXPECTED_PARAMS[1])) - assert_raises(IOError, didr.read_mosaic_dwi_dir, 'improbable') + with pytest.raises(IOError): + didr.read_mosaic_dwi_dir('improbable') @dicom_test @@ -53,29 +51,21 @@ def test_passing_kwds(): dicom_kwargs=dict(force=True)) assert_array_equal(data, data2) # This should raise an error in pydicom.dicomio.read_file - assert_raises(TypeError, - func, - IO_DATA_PATH, - dwi_glob, - dicom_kwargs=dict(not_a_parameter=True)) + with pytest.raises(TypeError): + func(IO_DATA_PATH, dwi_glob, dicom_kwargs=dict(not_a_parameter=True)) # These are invalid dicoms, so will raise an error unless force=True - assert_raises(pydicom.filereader.InvalidDicomError, - func, - IO_DATA_PATH, - csa_glob) + with pytest.raises(pydicom.filereader.InvalidDicomError): + func(IO_DATA_PATH, csa_glob) # But here, we catch the error because the dicoms are 
in the wrong # format - assert_raises(didr.DicomReadError, - func, - IO_DATA_PATH, - csa_glob, - dicom_kwargs=dict(force=True)) + with pytest.raises(didr.DicomReadError): + func(IO_DATA_PATH, csa_glob, dicom_kwargs=dict(force=True)) @dicom_test def test_slices_to_series(): dicom_files = (pjoin(IO_DATA_PATH, "%d.dcm" % i) for i in range(2)) wrappers = [didr.wrapper_from_file(f) for f in dicom_files] series = didr.slices_to_series(wrappers) - assert_equal(len(series), 1) - assert_equal(len(series[0]), 2) + assert len(series) == 1 + assert len(series[0]) == 2 diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index c78249c381..0bb875002b 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -9,18 +9,17 @@ import numpy as np -from nibabel.pydicom_compat import (have_dicom, pydicom, read_file, dicom_test, - tag_for_keyword) +from nibabel.pydicom_compat import have_dicom, pydicom, read_file, tag_for_keyword from .. import dicomwrappers as didw from .. import dicomreaders as didr from ...volumeutils import endian_codes +import pytest from unittest import TestCase -from nose.tools import (assert_true, assert_false, assert_equal, - assert_not_equal, assert_raises) +from . import dicom_test -from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns +from numpy.testing import assert_array_equal, assert_array_almost_equal from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data IO_DATA_PATH = pjoin(dirname(__file__), 'data') @@ -71,31 +70,37 @@ def test_wrappers(): (didw.MosaicWrapper, ({}, None, 10)), (didw.MultiframeWrapper, (multi_minimal,))): dw = maker(*args) - assert_equal(dw.get('InstanceNumber'), None) - assert_equal(dw.get('AcquisitionNumber'), None) - assert_raises(KeyError, dw.__getitem__, 'not an item') - assert_raises(didw.WrapperError, dw.get_data) - assert_raises(didw.WrapperError, dw.get_affine) - assert_raises(TypeError, maker) + assert dw.get('InstanceNumber') is None + assert dw.get('AcquisitionNumber') is None + with pytest.raises(KeyError): + dw['not an item'] + with pytest.raises(didw.WrapperError): + dw.get_data() + with pytest.raises(didw.WrapperError): + dw.affine + with pytest.raises(TypeError): + maker() # Check default attributes if not maker is didw.MosaicWrapper: - assert_false(dw.is_mosaic) - assert_equal(dw.b_matrix, None) - assert_equal(dw.q_vector, None) + assert not dw.is_mosaic + assert dw.b_matrix is None + assert dw.q_vector is None for maker in (didw.wrapper_from_data, didw.Wrapper, didw.SiemensWrapper, didw.MosaicWrapper ): dw = maker(DATA) - assert_equal(dw.get('InstanceNumber'), 2) - assert_equal(dw.get('AcquisitionNumber'), 2) - assert_raises(KeyError, dw.__getitem__, 'not an item') + assert dw.get('InstanceNumber') == 2 + assert dw.get('AcquisitionNumber') == 2 + with pytest.raises(KeyError): + dw['not an item'] for maker in (didw.MosaicWrapper, didw.wrapper_from_data): dw = maker(DATA) - assert_true(dw.is_mosaic) + assert dw.is_mosaic # DATA is not a Multiframe DICOM file - assert_raises(didw.WrapperError, didw.MultiframeWrapper, DATA) + with pytest.raises(didw.WrapperError): + didw.MultiframeWrapper(DATA) def test_get_from_wrapper(): @@ -103,12 +108,13 @@ def test_get_from_wrapper(): # data dcm_data = {'some_key': 'some value'} dw = didw.Wrapper(dcm_data) - assert_equal(dw.get('some_key'), 'some value') - assert_equal(dw.get('some_other_key'), None) + assert dw.get('some_key') == 'some value' + assert 
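# Sketch of the assert_raises -> pytest.raises conversion pattern used
# throughout these hunks; `lookup` is a made-up stand-in. The context
# manager keeps the failing call readable and exposes the exception
# for further checks.
import pytest

def lookup(key):
    raise KeyError(key)

with pytest.raises(KeyError) as excinfo:
    lookup('not an item')
assert 'not an item' in str(excinfo.value)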
dw.get('some_other_key') is None # Getitem uses the same dictionary access - assert_equal(dw['some_key'], 'some value') + assert dw['some_key'] == 'some value' # And raises a WrapperError for missing keys - assert_raises(KeyError, dw.__getitem__, 'some_other_key') + with pytest.raises(KeyError): + dw['some_other_key'] # Test we don't use attributes for get class FakeData(dict): @@ -116,7 +122,7 @@ class FakeData(dict): d = FakeData() d.some_key = 'another bit of data' dw = didw.Wrapper(d) - assert_equal(dw.get('some_key'), None) + assert dw.get('some_key') is None # Check get defers to dcm_data get class FakeData2(object): @@ -126,7 +132,7 @@ def get(self, key, default): d = FakeData2() d.some_key = 'another bit of data' dw = didw.Wrapper(d) - assert_equal(dw.get('some_key'), 1) + assert dw.get('some_key') == 1 @dicom_test @@ -134,36 +140,40 @@ def test_wrapper_from_data(): # test wrapper from data, wrapper from file for dw in (didw.wrapper_from_data(DATA), didw.wrapper_from_file(DATA_FILE)): - assert_equal(dw.get('InstanceNumber'), 2) - assert_equal(dw.get('AcquisitionNumber'), 2) - assert_raises(KeyError, dw.__getitem__, 'not an item') - assert_true(dw.is_mosaic) + assert dw.get('InstanceNumber') == 2 + assert dw.get('AcquisitionNumber') == 2 + with pytest.raises(KeyError): + dw['not an item'] + assert dw.is_mosaic assert_array_almost_equal( - np.dot(didr.DPCS_TO_TAL, dw.get_affine()), + np.dot(didr.DPCS_TO_TAL, dw.affine), EXPECTED_AFFINE) for dw in (didw.wrapper_from_data(DATA_PHILIPS), didw.wrapper_from_file(DATA_FILE_PHILIPS)): - assert_equal(dw.get('InstanceNumber'), 1) - assert_equal(dw.get('AcquisitionNumber'), 3) - assert_raises(KeyError, dw.__getitem__, 'not an item') - assert_true(dw.is_multiframe) + assert dw.get('InstanceNumber') == 1 + assert dw.get('AcquisitionNumber') == 3 + with pytest.raises(KeyError): + dw['not an item'] + assert dw.is_multiframe # Another CSA file dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM) - assert_true(dw.is_mosaic) + assert dw.is_mosaic # Check that multiframe requires minimal set of DICOM tags fake_data = dict() fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.2' dw = didw.wrapper_from_data(fake_data) - assert_false(dw.is_multiframe) + assert not dw.is_multiframe # use the correct SOPClassUID fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.1' - assert_raises(didw.WrapperError, didw.wrapper_from_data, fake_data) + with pytest.raises(didw.WrapperError): + didw.wrapper_from_data(fake_data) fake_data['PerFrameFunctionalGroupsSequence'] = [None] - assert_raises(didw.WrapperError, didw.wrapper_from_data, fake_data) + with pytest.raises(didw.WrapperError): + didw.wrapper_from_data(fake_data) fake_data['SharedFunctionalGroupsSequence'] = [None] # minimal set should now be met dw = didw.wrapper_from_data(fake_data) - assert_true(dw.is_multiframe) + assert dw.is_multiframe @dicom_test @@ -179,19 +189,18 @@ def test_wrapper_args_kwds(): assert_array_equal(data, dcm2.get_data()) # Trying to read non-dicom file raises pydicom error, usually csa_fname = pjoin(IO_DATA_PATH, 'csa2_b0.bin') - assert_raises(pydicom.filereader.InvalidDicomError, - didw.wrapper_from_file, - csa_fname) + with pytest.raises(pydicom.filereader.InvalidDicomError): + didw.wrapper_from_file(csa_fname) # We can force the read, in which case rubbish returns dcm_malo = didw.wrapper_from_file(csa_fname, force=True) - assert_false(dcm_malo.is_mosaic) + assert not dcm_malo.is_mosaic @dicom_test def test_dwi_params(): dw = didw.wrapper_from_data(DATA) b_matrix = dw.b_matrix - 
assert_equal(b_matrix.shape, (3, 3)) + assert b_matrix.shape == (3, 3) q = dw.q_vector b = np.sqrt(np.sum(q * q)) # vector norm g = q / b @@ -204,9 +213,9 @@ def test_q_vector_etc(): # Test diffusion params in wrapper classes # Default is no q_vector, b_value, b_vector dw = didw.Wrapper(DATA) - assert_equal(dw.q_vector, None) - assert_equal(dw.b_value, None) - assert_equal(dw.b_vector, None) + assert dw.q_vector is None + assert dw.b_value is None + assert dw.b_vector is None for pos in range(3): q_vec = np.zeros((3,)) q_vec[pos] = 10. @@ -214,12 +223,12 @@ def test_q_vector_etc(): dw = didw.Wrapper(DATA) dw.q_vector = q_vec assert_array_equal(dw.q_vector, q_vec) - assert_equal(dw.b_value, 10) + assert dw.b_value == 10 assert_array_equal(dw.b_vector, q_vec / 10.) # Reset wrapped dicom to refresh one_time property dw = didw.Wrapper(DATA) dw.q_vector = np.array([0, 0, 1e-6]) - assert_equal(dw.b_value, 0) + assert dw.b_value == 0 assert_array_equal(dw.b_vector, np.zeros((3,))) # Test MosaicWrapper sdw = didw.MosaicWrapper(DATA) @@ -230,7 +239,7 @@ def test_q_vector_etc(): # Reset wrapped dicom to refresh one_time property sdw = didw.MosaicWrapper(DATA) sdw.q_vector = np.array([0, 0, 1e-6]) - assert_equal(sdw.b_value, 0) + assert sdw.b_value == 0 assert_array_equal(sdw.b_vector, np.zeros((3,))) @@ -238,51 +247,51 @@ def test_q_vector_etc(): def test_vol_matching(): # make the Siemens wrapper, check it compares True against itself dw_siemens = didw.wrapper_from_data(DATA) - assert_true(dw_siemens.is_mosaic) - assert_true(dw_siemens.is_csa) - assert_true(dw_siemens.is_same_series(dw_siemens)) + assert dw_siemens.is_mosaic + assert dw_siemens.is_csa + assert dw_siemens.is_same_series(dw_siemens) # make plain wrapper, compare against itself dw_plain = didw.Wrapper(DATA) - assert_false(dw_plain.is_mosaic) - assert_false(dw_plain.is_csa) - assert_true(dw_plain.is_same_series(dw_plain)) + assert not dw_plain.is_mosaic + assert not dw_plain.is_csa + assert dw_plain.is_same_series(dw_plain) # specific vs plain wrapper compares False, because the Siemens # wrapper has more non-empty information - assert_false(dw_plain.is_same_series(dw_siemens)) + assert not dw_plain.is_same_series(dw_siemens) # and this should be symmetric - assert_false(dw_siemens.is_same_series(dw_plain)) + assert not dw_siemens.is_same_series(dw_plain) # we can even make an empty wrapper. This compares True against # itself but False against the others dw_empty = didw.Wrapper({}) - assert_true(dw_empty.is_same_series(dw_empty)) - assert_false(dw_empty.is_same_series(dw_plain)) - assert_false(dw_plain.is_same_series(dw_empty)) + assert dw_empty.is_same_series(dw_empty) + assert not dw_empty.is_same_series(dw_plain) + assert not dw_plain.is_same_series(dw_empty) # Just to check the interface, make a pretend signature-providing # object. 
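# Sketch of the q-vector decomposition test_dwi_params relies on: the
# b value is the Euclidean norm of q and the b vector is its direction.
import numpy as np

q = np.array([0.0, 0.0, 10.0])
b = np.sqrt(np.sum(q * q))       # same as np.linalg.norm(q)
g = q / b
assert b == 10
assert np.allclose(g, [0, 0, 1])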
class C(object): series_signature = {} - assert_true(dw_empty.is_same_series(C())) + assert dw_empty.is_same_series(C()) # make the Philips wrapper, check it compares True against itself dw_philips = didw.wrapper_from_data(DATA_PHILIPS) - assert_true(dw_philips.is_multiframe) - assert_true(dw_philips.is_same_series(dw_philips)) + assert dw_philips.is_multiframe + assert dw_philips.is_same_series(dw_philips) # make plain wrapper, compare against itself dw_plain_philips = didw.Wrapper(DATA) - assert_false(dw_plain_philips.is_multiframe) - assert_true(dw_plain_philips.is_same_series(dw_plain_philips)) + assert not dw_plain_philips.is_multiframe + assert dw_plain_philips.is_same_series(dw_plain_philips) # specific vs plain wrapper compares False, because the Philips # wrapper has more non-empty information - assert_false(dw_plain_philips.is_same_series(dw_philips)) + assert not dw_plain_philips.is_same_series(dw_philips) # and this should be symmetric - assert_false(dw_philips.is_same_series(dw_plain_philips)) + assert not dw_philips.is_same_series(dw_plain_philips) # we can even make an empty wrapper. This compares True against # itself but False against the others dw_empty = didw.Wrapper({}) - assert_true(dw_empty.is_same_series(dw_empty)) - assert_false(dw_empty.is_same_series(dw_plain_philips)) - assert_false(dw_plain_philips.is_same_series(dw_empty)) + assert dw_empty.is_same_series(dw_empty) + assert not dw_empty.is_same_series(dw_plain_philips) + assert not dw_plain_philips.is_same_series(dw_empty) @dicom_test @@ -290,10 +299,10 @@ def test_slice_indicator(): dw_0 = didw.wrapper_from_file(DATA_FILE_B0) dw_1000 = didw.wrapper_from_data(DATA) z = dw_0.slice_indicator - assert_false(z is None) - assert_equal(z, dw_1000.slice_indicator) + assert not z is None + assert z == dw_1000.slice_indicator dw_empty = didw.Wrapper({}) - assert_true(dw_empty.slice_indicator is None) + assert dw_empty.slice_indicator is None @dicom_test @@ -301,7 +310,7 @@ def test_orthogonal(): # Test that the slice normal is sufficiently orthogonal dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM) R = dw.rotation_matrix - assert_true(np.allclose(np.eye(3), np.dot(R, R.T), atol=1e-6)) + assert np.allclose(np.eye(3), np.dot(R, R.T), atol=1e-6) # Test the threshold for rotation matrix orthogonality d = {} @@ -313,7 +322,8 @@ def test_orthogonal(): assert_array_almost_equal(dw.rotation_matrix, np.eye(3), 5) d['ImageOrientationPatient'] = [1e-4, 1, 0, 1, 0, 0] dw = didw.wrapper_from_data(d) - assert_raises(didw.WrapperPrecisionError, getattr, dw, 'rotation_matrix') + with pytest.raises(didw.WrapperPrecisionError): + dw.rotation_matrix @dicom_test @@ -338,7 +348,7 @@ def test_use_csa_sign(): iop = dw.image_orient_patient dw.image_orient_patient = np.c_[iop[:, 1], iop[:, 0]] dw2 = didw.wrapper_from_file(DATA_FILE_SLC_NORM) - assert_true(np.allclose(dw.slice_normal, dw2.slice_normal)) + assert np.allclose(dw.slice_normal, dw2.slice_normal) @dicom_test @@ -347,7 +357,8 @@ def test_assert_parallel(): # slice normal are not parallel dw = didw.wrapper_from_file(DATA_FILE_SLC_NORM) dw.image_orient_patient = np.c_[[1., 0., 0.], [0., 1., 0.]] - assert_raises(AssertionError, dw.__getattribute__, 'slice_normal') + with pytest.raises(AssertionError): + dw.slice_normal @dicom_test @@ -355,7 +366,7 @@ def test_decimal_rescale(): # Test that we don't get back a data array with dtype np.object when our # rescale slope is a decimal dw = didw.wrapper_from_file(DATA_FILE_DEC_RSCL) - assert_not_equal(dw.get_data().dtype, np.object) + assert 
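# Sketch of the property test_orthogonal asserts: a rotation matrix
# built from orthonormal direction cosines satisfies R . R^T == I, up
# to floating-point tolerance.
import numpy as np

theta = np.pi / 7
R = np.array([[np.cos(theta), -np.sin(theta), 0],
              [np.sin(theta),  np.cos(theta), 0],
              [0,              0,             1]])
assert np.allclose(np.eye(3), np.dot(R, R.T), atol=1e-6)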
dw.get_data().dtype != np.object def fake_frames(seq_name, field_name, value_seq): @@ -449,84 +460,95 @@ def test_shape(self): MFW = self.WRAPCLASS dw = MFW(fake_mf) # No rows, cols, raise WrapperError - assert_raises(didw.WrapperError, getattr, dw, 'image_shape') + with pytest.raises(didw.WrapperError): + dw.image_shape fake_mf['Rows'] = 64 - assert_raises(didw.WrapperError, getattr, dw, 'image_shape') + with pytest.raises(didw.WrapperError): + dw.image_shape fake_mf.pop('Rows') fake_mf['Columns'] = 64 - assert_raises(didw.WrapperError, getattr, dw, 'image_shape') + with pytest.raises(didw.WrapperError): + dw.image_shape fake_mf['Rows'] = 32 # Missing frame data, raise AssertionError - assert_raises(AssertionError, getattr, dw, 'image_shape') + with pytest.raises(AssertionError): + dw.image_shape fake_mf['NumberOfFrames'] = 4 # PerFrameFunctionalGroupsSequence does not match NumberOfFrames - assert_raises(AssertionError, getattr, dw, 'image_shape') + with pytest.raises(AssertionError): + dw.image_shape # check 3D shape when StackID index is 0 div_seq = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 4)) + assert MFW(fake_mf).image_shape == (32, 64, 4) # Check stack number matching when StackID index is 0 div_seq = ((1, 1), (1, 2), (1, 3), (2, 4)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'image_shape') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # Make some fake frame data for 4D when StackID index is 0 div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 2, 3)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) # Check stack number matching for 4D when StackID index is 0 div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (2, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'image_shape') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # Check indices can be non-contiguous when StackID index is 0 div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 3), (1, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 2, 2)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 2) # Check indices can include zero when StackID index is 0 div_seq = ((1, 1, 0), (1, 2, 0), (1, 1, 3), (1, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 2, 2)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 2) # check 3D shape when there is no StackID index div_seq = ((1,), (2,), (3,), (4,)) sid_seq = (1, 1, 1, 1) fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 4)) + assert MFW(fake_mf).image_shape == (32, 64, 4) # check 3D stack number matching when there is no StackID index div_seq = ((1,), (2,), (3,), (4,)) sid_seq = (1, 1, 1, 2) fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'image_shape') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 4D shape when there is no StackID index div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 1) fake_mf.update(fake_shape_dependents(div_seq, 
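# Rough sketch of the shape bookkeeping these test_shape cases pin
# down (the real wrapper logic is more involved): after dropping the
# StackID column, the count of unique values in each remaining
# DimensionIndexValues column contributes one extra image axis.
import numpy as np

div_seq = np.array([(1, 1, 1), (1, 2, 1), (1, 1, 2),
                    (1, 2, 2), (1, 1, 3), (1, 2, 3)])
indices = div_seq[:, 1:]                    # StackID lives in column 0
extra = tuple(len(np.unique(col)) for col in indices.T)
assert (32, 64) + extra == (32, 64, 2, 3)   # Rows, Columns come first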
sid_seq=sid_seq)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 2, 3)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) # check 4D stack number matching when there is no StackID index div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 2) fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'image_shape') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 3D shape when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 1), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 4)) + assert MFW(fake_mf).image_shape == (32, 64, 4) # Check stack number matching when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 2), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'image_shape') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # Make some fake frame data for 4D when StackID index is 1 div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) - assert_equal(MFW(fake_mf).image_shape, (32, 64, 2, 3)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) def test_iop(self): # Test Image orient patient for multiframe fake_mf = copy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) - assert_raises(didw.WrapperError, getattr, dw, 'image_orient_patient') + with pytest.raises(didw.WrapperError): + dw.image_orient_patient # Make a fake frame fake_frame = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', @@ -535,8 +557,8 @@ def test_iop(self): assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) fake_mf['SharedFunctionalGroupsSequence'] = [None] - assert_raises(didw.WrapperError, - getattr, MFW(fake_mf), 'image_orient_patient') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_orient_patient fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) @@ -546,14 +568,16 @@ def test_voxel_sizes(self): fake_mf = copy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) - assert_raises(didw.WrapperError, getattr, dw, 'voxel_sizes') + with pytest.raises(didw.WrapperError): + dw.voxel_sizes # Make a fake frame fake_frame = fake_frames('PixelMeasuresSequence', 'PixelSpacing', [[2.1, 3.2]])[0] fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] # Still not enough, we lack information for slice distances - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'voxel_sizes') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).voxel_sizes # This can come from SpacingBetweenSlices or frame SliceThickness fake_mf['SpacingBetweenSlices'] = 4.3 assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 4.3]) @@ -565,7 +589,8 @@ def test_voxel_sizes(self): assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Removing shared leads to error again fake_mf['SharedFunctionalGroupsSequence'] = [None] - assert_raises(didw.WrapperError, getattr, MFW(fake_mf), 'voxel_sizes') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).voxel_sizes # Restoring to frames makes it work again fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) @@ -584,7 +609,8 @@ def test_image_position(self): fake_mf = copy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = 
MFW(fake_mf) - assert_raises(didw.WrapperError, getattr, dw, 'image_position') + with pytest.raises(didw.WrapperError): + dw.image_position # Make a fake frame fake_frame = fake_frames('PlanePositionSequence', 'ImagePositionPatient', @@ -592,21 +618,23 @@ def test_image_position(self): fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) fake_mf['SharedFunctionalGroupsSequence'] = [None] - assert_raises(didw.WrapperError, - getattr, MFW(fake_mf), 'image_position') + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_position fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) # Check lists of Decimals work fake_frame.PlanePositionSequence[0].ImagePositionPatient = [ Decimal(str(v)) for v in [-2, 3, 7]] assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) - assert_equal(MFW(fake_mf).image_position.dtype, float) + assert MFW(fake_mf).image_position.dtype == float @dicom_test def test_affine(self): # Make sure we find orientation/position/spacing info dw = didw.wrapper_from_file(DATA_FILE_4D) - dw.get_affine() + aff = dw.affine + with pytest.deprecated_call(): + assert np.array_equal(dw.get_affine(), aff) @dicom_test def test_data_real(self): @@ -619,13 +647,12 @@ def test_data_real(self): if endian_codes[data.dtype.byteorder] == '>': data = data.byteswap() dat_str = data.tostring() - assert_equal(sha1(dat_str).hexdigest(), - '149323269b0af92baa7508e19ca315240f77fa8c') + assert sha1(dat_str).hexdigest() == '149323269b0af92baa7508e19ca315240f77fa8c' @dicom_test def test_slicethickness_fallback(self): dw = didw.wrapper_from_file(DATA_FILE_EMPTY_ST) - assert_equal(dw.voxel_sizes[2], 1.0) + assert dw.voxel_sizes[2] == 1.0 @dicom_test @needs_nibabel_data('nitest-dicom') @@ -633,14 +660,15 @@ def test_data_derived_shape(self): # Test 4D diffusion data with an additional trace volume included # Excludes the trace volume and generates the correct shape dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) - assert_equal(dw.image_shape, (96, 96, 60, 33)) + assert dw.image_shape == (96, 96, 60, 33) @dicom_test @needs_nibabel_data('nitest-dicom') def test_data_unreadable_private_headers(self): # Test CT image with unreadable CSA tags - dw = assert_warns(UserWarning, didw.wrapper_from_file, DATA_FILE_CT) - assert_equal(dw.image_shape, (512, 571)) + with pytest.warns(UserWarning): + dw = didw.wrapper_from_file(DATA_FILE_CT) + assert dw.image_shape == (512, 571) @dicom_test def test_data_fake(self): @@ -649,19 +677,22 @@ def test_data_fake(self): MFW = self.WRAPCLASS dw = MFW(fake_mf) # Fails - no shape - assert_raises(didw.WrapperError, dw.get_data) + with pytest.raises(didw.WrapperError): + dw.get_data() # Set shape by cheating dw.image_shape = (2, 3, 4) # Still fails - no data - assert_raises(didw.WrapperError, dw.get_data) + with pytest.raises(didw.WrapperError): + dw.get_data() # Make shape and indices fake_mf['Rows'] = 2 fake_mf['Columns'] = 3 dim_idxs = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0)) - assert_equal(MFW(fake_mf).image_shape, (2, 3, 4)) + assert MFW(fake_mf).image_shape == (2, 3, 4) # Still fails - no data - assert_raises(didw.WrapperError, dw.get_data) + with pytest.raises(didw.WrapperError): + dw.get_data() # Add data - 3D data = np.arange(24).reshape((2, 3, 4)) # Frames dim is first for some reason @@ -723,7 +754,8 @@ def test__scale_data(self): fake_mf['PerFrameFunctionalGroupsSequence'] = 
[fake_frame] # Lacking RescaleIntercept -> Error dw = MFW(fake_mf) - assert_raises(AttributeError, dw._scale_data, data) + with pytest.raises(AttributeError): + dw._scale_data(data) fake_frame.PixelValueTransformationSequence[0].RescaleIntercept = -2 assert_array_equal(data * 3 - 2, dw._scale_data(data)) # Decimals are OK diff --git a/nibabel/nicom/tests/test_structreader.py b/nibabel/nicom/tests/test_structreader.py index 05461d18a0..6e58931559 100644 --- a/nibabel/nicom/tests/test_structreader.py +++ b/nibabel/nicom/tests/test_structreader.py @@ -5,9 +5,6 @@ from ..structreader import Unpacker -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) - - def test_unpacker(): s = b'1234\x00\x01' @@ -22,30 +19,30 @@ def test_unpacker(): swapped_int = le_int swapped_code = '<' up_str = Unpacker(s, endian='<') - assert_equal(up_str.read(4), b'1234') + assert up_str.read(4) == b'1234' up_str.ptr = 0 - assert_equal(up_str.unpack('4s'), (b'1234',)) - assert_equal(up_str.unpack('h'), (le_int,)) + assert up_str.unpack('4s') == (b'1234',) + assert up_str.unpack('h') == (le_int,) up_str = Unpacker(s, endian='>') - assert_equal(up_str.unpack('4s'), (b'1234',)) - assert_equal(up_str.unpack('h'), (be_int,)) + assert up_str.unpack('4s') == (b'1234',) + assert up_str.unpack('h') == (be_int,) # now test conflict of endian up_str = Unpacker(s, ptr=4, endian='>') - assert_equal(up_str.unpack('h'), (be_int,)) + assert up_str.unpack('>h') == (be_int,) up_str.ptr = 4 - assert_equal(up_str.unpack('@h'), (native_int,)) + assert up_str.unpack('@h') == (native_int,) # test -1 for read up_str.ptr = 2 - assert_equal(up_str.read(), b'34\x00\x01') + assert up_str.read() == b'34\x00\x01' # past end - assert_equal(up_str.read(), b'') + assert up_str.read() == b'' # with n_bytes up_str.ptr = 2 - assert_equal(up_str.read(2), b'34') - assert_equal(up_str.read(2), b'\x00\x01') + assert up_str.read(2) == b'34' + assert up_str.read(2) == b'\x00\x01' diff --git a/nibabel/nicom/tests/test_utils.py b/nibabel/nicom/tests/test_utils.py index a7ab9b6bdc..142daa3d16 100644 --- a/nibabel/nicom/tests/test_utils.py +++ b/nibabel/nicom/tests/test_utils.py @@ -2,64 +2,48 @@ """ import re - -from numpy.testing import (assert_almost_equal, - assert_array_equal) - -from nose.tools import (assert_true, assert_false, assert_raises, - assert_equal, assert_not_equal) - - from ..utils import find_private_section -from nibabel.pydicom_compat import dicom_test, pydicom -from .test_dicomwrappers import (DATA, DATA_PHILIPS) +from . 
import dicom_test
+from ...pydicom_compat import pydicom
+from .test_dicomwrappers import DATA, DATA_PHILIPS


 @dicom_test
 def test_find_private_section_real():
     # Find section containing named private creator information
     # On real data first
-    assert_equal(find_private_section(DATA, 0x29, 'SIEMENS CSA HEADER'),
-                 0x1000)
-    assert_equal(find_private_section(DATA, 0x29, 'SIEMENS MEDCOM HEADER2'),
-                 0x1100)
-    assert_equal(find_private_section(DATA_PHILIPS, 0x29, 'SIEMENS CSA HEADER'),
-                 None)
+    assert find_private_section(DATA, 0x29, 'SIEMENS CSA HEADER') == 0x1000
+    assert find_private_section(DATA, 0x29, 'SIEMENS MEDCOM HEADER2') == 0x1100
+    assert find_private_section(DATA_PHILIPS, 0x29, 'SIEMENS CSA HEADER') is None
     # Make fake datasets
     ds = pydicom.dataset.Dataset({})
     ds.add_new((0x11, 0x10), 'LO', b'some section')
-    assert_equal(find_private_section(ds, 0x11, 'some section'), 0x1000)
+    assert find_private_section(ds, 0x11, 'some section') == 0x1000
     ds.add_new((0x11, 0x11), 'LO', b'anther section')
     ds.add_new((0x11, 0x12), 'LO', b'third section')
-    assert_equal(find_private_section(ds, 0x11, 'third section'), 0x1200)
+    assert find_private_section(ds, 0x11, 'third section') == 0x1200
     # Wrong 'OB' is acceptable for VR (should be 'LO')
     ds.add_new((0x11, 0x12), 'OB', b'third section')
-    assert_equal(find_private_section(ds, 0x11, 'third section'), 0x1200)
+    assert find_private_section(ds, 0x11, 'third section') == 0x1200
     # Anything else not acceptable
     ds.add_new((0x11, 0x12), 'PN', b'third section')
-    assert_equal(find_private_section(ds, 0x11, 'third section'), None)
+    assert find_private_section(ds, 0x11, 'third section') is None
     # The input (DICOM value) can be a string instead of bytes
     ds.add_new((0x11, 0x12), 'LO', 'third section')
-    assert_equal(find_private_section(ds, 0x11, 'third section'), 0x1200)
+    assert find_private_section(ds, 0x11, 'third section') == 0x1200
     # Search can be bytes as well as string
     ds.add_new((0x11, 0x12), 'LO', b'third section')
-    assert_equal(find_private_section(ds, 0x11, b'third section'), 0x1200)
+    assert find_private_section(ds, 0x11, b'third section') == 0x1200
     # Search with string or bytes must be exact
-    assert_equal(find_private_section(ds, 0x11, b'third sectio'), None)
-    assert_equal(find_private_section(ds, 0x11, 'hird sectio'), None)
+    assert find_private_section(ds, 0x11, b'third sectio') is None
+    assert find_private_section(ds, 0x11, 'hird sectio') is None
     # The search can be a regexp
-    assert_equal(find_private_section(ds,
-                                      0x11,
-                                      re.compile(r'third\Wsectio[nN]')),
-                 0x1200)
+    assert find_private_section(ds, 0x11, re.compile(r'third\Wsectio[nN]')) == 0x1200
     # No match -> None
-    assert_equal(find_private_section(ds,
-                                      0x11,
-                                      re.compile(r'not third\Wsectio[nN]')),
-                 None)
+    assert find_private_section(ds, 0x11, re.compile(r'not third\Wsectio[nN]')) is None
     # If there are gaps in the sequence before the one we want, that is OK
     ds.add_new((0x11, 0x13), 'LO', b'near section')
-    assert_equal(find_private_section(ds, 0x11, 'near section'), 0x1300)
+    assert find_private_section(ds, 0x11, 'near section') == 0x1300
     ds.add_new((0x11, 0x15), 'LO', b'far section')
-    assert_equal(find_private_section(ds, 0x11, 'far section'), 0x1500)
+    assert find_private_section(ds, 0x11, 'far section') == 0x1500
diff --git a/nibabel/pydicom_compat.py b/nibabel/pydicom_compat.py
index beb787f315..9cca2a293d 100644
--- a/nibabel/pydicom_compat.py
+++ b/nibabel/pydicom_compat.py
@@ -12,13 +12,19 @@
   else None;
 * tag_for_keyword : ``tag_for_keyword`` function if pydicom or 
dicom module is importable else None; + +A test decorator is available in nibabel.nicom.tests: + * dicom_test : test decorator that skips test if dicom not available. + +A deprecated copy is available here for backward compatibility. """ # Module has (apparently) unused imports; stop flake8 complaining # flake8: noqa import numpy as np +from .deprecated import deprecate_with_version have_dicom = True pydicom = read_file = tag_for_keyword = Sequence = None @@ -50,6 +56,9 @@ tag_for_keyword = pydicom.datadict.tag_for_name -# test decorator that skips test if dicom not available. -dicom_test = np.testing.dec.skipif(not have_dicom, - 'could not import dicom or pydicom') +@deprecate_with_version("dicom_test has been moved to nibabel.nicom.tests", + since="3.1", until="5.0") +def dicom_test(func): + # Import locally to avoid circular dependency + from .nicom.tests import dicom_test + return dicom_test(func) diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index d5dff4a4e4..1bb4e5ae2e 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -49,7 +49,8 @@ header_file = os.path.join(data_path, 'nifti1.hdr') image_file = os.path.join(data_path, 'example4d.nii.gz') -from nibabel.pydicom_compat import pydicom, dicom_test +from ..pydicom_compat import pydicom +from ..nicom.tests import dicom_test # Example transformation matrix From a23c63a24593aca804377e8837e78d9193d03724 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 7 Feb 2020 00:53:20 -0500 Subject: [PATCH 640/689] TEST: Use vanilla unittest.skip* over pytest --- nibabel/freesurfer/tests/test_io.py | 7 ++-- nibabel/tests/nibabel_data.py | 10 +++--- nibabel/tests/test_dft.py | 13 +++++--- nibabel/tests/test_image_api.py | 9 ++++-- nibabel/tests/test_parrec_data.py | 3 +- nibabel/tests/test_processing.py | 6 ++-- nibabel/tests/test_proxy_api.py | 3 +- nibabel/tests/test_scripts.py | 3 +- nibabel/tests/test_spm99analyze.py | 50 +++++++++-------------------- nibabel/tests/test_viewers.py | 3 +- 10 files changed, 49 insertions(+), 58 deletions(-) diff --git a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index 521923057f..e1ec971c65 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -6,10 +6,9 @@ import hashlib import warnings - from ...tmpdirs import InTemporaryDirectory - +import unittest import pytest import numpy as np from numpy.testing import assert_allclose, assert_array_equal @@ -36,8 +35,8 @@ data_path = pjoin(nib_data, 'nitest-freesurfer', DATA_SDIR) have_freesurfer = isdir(data_path) -freesurfer_test = pytest.mark.skipif(not have_freesurfer, - reason='cannot find freesurfer {0} directory'.format(DATA_SDIR)) +freesurfer_test = unittest.skipUnless(have_freesurfer, + 'cannot find freesurfer {0} directory'.format(DATA_SDIR)) def _hash_file_content(fname): hasher = hashlib.md5() diff --git a/nibabel/tests/nibabel_data.py b/nibabel/tests/nibabel_data.py index e2e5bc9ed3..3c1b58502d 100644 --- a/nibabel/tests/nibabel_data.py +++ b/nibabel/tests/nibabel_data.py @@ -4,7 +4,7 @@ from os import environ, listdir from os.path import dirname, realpath, join as pjoin, isdir, exists -import pytest +import unittest def get_nibabel_data(): @@ -39,11 +39,11 @@ def needs_nibabel_data(subdir=None): """ nibabel_data = get_nibabel_data() if nibabel_data == '': - return pytest.mark.skipif(True, reason="Need nibabel-data directory for this test") + return unittest.skip("Need nibabel-data directory for this test") if subdir is None: - 
return pytest.mark.skipif(False, reason="Don't skip") + return lambda x: x required_path = pjoin(nibabel_data, subdir) # Path should not be empty (as is the case for not-updated submodules) have_files = exists(required_path) and len(listdir(required_path)) > 0 - return pytest.mark.skipif(not have_files, - reason="Need files in {0} for these tests".format(required_path)) \ No newline at end of file + return unittest.skipUnless(have_files, + "Need files in {0} for these tests".format(required_path)) diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py index d50ba4023e..9c3a4f5d85 100644 --- a/nibabel/tests/test_dft.py +++ b/nibabel/tests/test_dft.py @@ -10,6 +10,7 @@ from .. import dft from .. import nifti1 +import unittest import pytest # Shield optional package imports @@ -22,10 +23,12 @@ data_dir = pjoin(dirname(__file__), 'data') -pytestmark = [ - pytest.mark.skipif(os.name == 'nt', reason='FUSE not available for windows, skipping dft tests'), - pytest.mark.skipif(not have_dicom, reason='Need pydicom for dft tests, skipping') -] +def setUpModule(): + if os.name == 'nt': + raise unittest.SkipTest('FUSE not available for windows, skipping dft tests') + if not have_dicom: + raise unittest.SkipTest('Need pydicom for dft tests, skipping') + def test_init(): dft.clear_cache() @@ -76,7 +79,7 @@ def test_storage_instance(): pass -@pytest.mark.skipif(not have_pil, reason='could not import PIL.Image') +@unittest.skipUnless(have_pil, 'could not import PIL.Image') def test_png(): studies = dft.get_studies(data_dir) data = studies[0].series[0].as_png() diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f5aeb53822..f6245aa594 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -41,6 +41,7 @@ from ..spatialimages import SpatialImage from .. 
import minc1, minc2, parrec, brikhead +import unittest import pytest from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose @@ -121,7 +122,7 @@ def validate_filenames(self, imaker, params): # Validate the filename, file_map interface if not self.can_save: - pytest.skip() + raise unittest.SkipTest img = imaker() img.set_data_dtype(np.float32) # to avoid rounding in load / save # Make sure the object does not have a file_map @@ -706,8 +707,12 @@ class TestMinc1API(ImageHeaderAPI): example_images = MINC1_EXAMPLE_IMAGES -@pytest.mark.skipif(not have_h5py, reason="Need h5py for Minc2 tests") class TestMinc2API(TestMinc1API): + + def __init__(self): + if not have_h5py: + raise unittest.SkipTest('Need h5py for these tests') + klass = image_maker = Minc2Image loader = minc2.load example_images = MINC2_EXAMPLE_IMAGES diff --git a/nibabel/tests/test_parrec_data.py b/nibabel/tests/test_parrec_data.py index 30295a269a..e626068ca1 100644 --- a/nibabel/tests/test_parrec_data.py +++ b/nibabel/tests/test_parrec_data.py @@ -12,6 +12,7 @@ from .nibabel_data import get_nibabel_data, needs_nibabel_data +import unittest import pytest from numpy.testing import assert_almost_equal @@ -60,7 +61,7 @@ def test_fieldmap(): fieldmap_nii = pjoin(BALLS, 'NIFTI', 'fieldmap.nii.gz') load(fieldmap_par) top_load(fieldmap_nii) - raise pytest.skip('Fieldmap remains puzzling') + raise unittest.SkipTest('Fieldmap remains puzzling') @needs_nibabel_data('parrec_oblique') diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index dea34b85a2..1dd64c6a5c 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -27,14 +27,14 @@ voxel_sizes) from nibabel.eulerangles import euler2mat -from numpy.testing import (assert_almost_equal, - assert_array_equal) +from numpy.testing import assert_almost_equal, assert_array_equal +import unittest import pytest from nibabel.tests.test_spaces import assert_all_in, get_outspace_params from nibabel.testing import assert_allclose_safely -needs_scipy = pytest.mark.skipif(not have_scipy, reason='These tests need scipy') +needs_scipy = unittest.skipUnless(have_scipy, 'These tests need scipy') DATA_DIR = pjoin(dirname(__file__), 'data') diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index c74d4fbdb2..f8e25c14cc 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -51,6 +51,7 @@ from ..arrayproxy import ArrayProxy, is_proxy +import unittest import pytest from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose @@ -428,7 +429,7 @@ def eg_func(): arr_out=arr_out)) def validate_header_isolated(self, pmaker, params): - raise pytest.skip('ECAT header does not support dtype get') + raise unittest.SkipTest('ECAT header does not support dtype get') class TestPARRECAPI(_TestProxyAPI): diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 19dd5f6fd0..c8c87092bf 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -20,6 +20,7 @@ from ..loadsave import load from ..orientations import flip_axis, aff2axcodes, inv_ornt_aff +import unittest import pytest from numpy.testing import assert_almost_equal @@ -120,7 +121,7 @@ def test_nib_ls(args): check_nib_ls_example4d(*args) -@pytest.mark.skipif(not load_small_file(), reason="can't load the small.mnc file") +@unittest.skipUnless(load_small_file(), "Can't load the small.mnc file") @script_test def test_nib_ls_multiple(): # verify 
that correctly lists/formats for multiple files diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 977fef3071..3198c43ea7 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -13,14 +13,15 @@ from io import BytesIO from numpy.testing import assert_array_equal, assert_array_almost_equal +import unittest import pytest -# Decorator to skip tests requiring save / load if scipy not available for mat -# files from ..optpkg import optional_package _, have_scipy, _ = optional_package('scipy') -scipy_skip = pytest.mark.skipif(not have_scipy, reason='scipy not available') +# Decorator to skip tests requiring save / load if scipy not available for mat +# files +needs_scipy = unittest.skipUnless(have_scipy, 'scipy not available') from ..spm99analyze import (Spm99AnalyzeHeader, Spm99AnalyzeImage, HeaderTypeError) @@ -409,38 +410,17 @@ class TestSpm99AnalyzeImage(test_analyze.TestAnalyzeImage, ImageScalingMixin): image_class = Spm99AnalyzeImage # Decorating the old way, before the team invented @ - test_data_hdr_cache = (scipy_skip( - test_analyze.TestAnalyzeImage.test_data_hdr_cache - )) - - test_header_updating = (scipy_skip( - test_analyze.TestAnalyzeImage.test_header_updating - )) - - test_offset_to_zero = (scipy_skip( - test_analyze.TestAnalyzeImage.test_offset_to_zero - )) - - test_big_offset_exts = (scipy_skip( - test_analyze.TestAnalyzeImage.test_big_offset_exts - )) - - test_header_scaling = scipy_skip( - ImageScalingMixin.test_header_scaling) - - test_int_int_scaling = scipy_skip( - ImageScalingMixin.test_int_int_scaling) - - test_write_scaling = scipy_skip( - ImageScalingMixin.test_write_scaling) - - test_no_scaling = scipy_skip( - ImageScalingMixin.test_no_scaling) - - test_nan2zero_range_ok = scipy_skip( - ImageScalingMixin.test_nan2zero_range_ok) - - @scipy_skip + test_data_hdr_cache = needs_scipy(test_analyze.TestAnalyzeImage.test_data_hdr_cache) + test_header_updating = needs_scipy(test_analyze.TestAnalyzeImage.test_header_updating) + test_offset_to_zero = needs_scipy(test_analyze.TestAnalyzeImage.test_offset_to_zero) + test_big_offset_exts = needs_scipy(test_analyze.TestAnalyzeImage.test_big_offset_exts) + test_header_scaling = needs_scipy(ImageScalingMixin.test_header_scaling) + test_int_int_scaling = needs_scipy(ImageScalingMixin.test_int_int_scaling) + test_write_scaling = needs_scipy(ImageScalingMixin.test_write_scaling) + test_no_scaling = needs_scipy(ImageScalingMixin.test_no_scaling) + test_nan2zero_range_ok = needs_scipy(ImageScalingMixin.test_nan2zero_range_ok) + + @needs_scipy def test_mat_read(self): # Test mat file reading and writing for the SPM analyze types img_klass = self.image_class diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 1be6e400a2..8bcb5bda06 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -16,11 +16,12 @@ from numpy.testing import assert_array_equal, assert_equal +import unittest import pytest # Need at least MPL 1.3 for viewer tests. 
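The `unittest.skipUnless` conversions in this series (for matplotlib just below, for scipy in the spm99analyze hunk above) work because `skipUnless` returns an ordinary decorator object that both the unittest and pytest runners honour, and that can also be applied programmatically to inherited test methods. A minimal self-contained sketch of the pattern — `have_thing` is a stand-in for an `optional_package` probe, not nibabel code:

    import unittest

    have_thing = False  # stand-in for: _, have_thing, _ = optional_package('thing')
    needs_thing = unittest.skipUnless(have_thing, 'thing not available')


    class Base(unittest.TestCase):
        def test_round_trip(self):
            self.assertTrue(True)


    class WithThing(Base):
        # Re-wrap the inherited method; skipUnless returns a new wrapped
        # function, so Base.test_round_trip itself is left untouched
        test_round_trip = needs_thing(Base.test_round_trip)


    if __name__ == '__main__':
        unittest.main()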
matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.3') -needs_mpl = pytest.mark.skipif(not has_mpl, reason='These tests need matplotlib') +needs_mpl = unittest.skipUnless(has_mpl, 'These tests need matplotlib') if has_mpl: matplotlib.use('Agg') From 2542320e7e528c91d548f42951963b400fb9c6c7 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 7 Feb 2020 15:20:14 -0500 Subject: [PATCH 641/689] RF: Remove unused pieces of testing_pytest and switch to unittest skip --- nibabel/testing_pytest/__init__.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/nibabel/testing_pytest/__init__.py b/nibabel/testing_pytest/__init__.py index 71217612ed..52055ebcc3 100644 --- a/nibabel/testing_pytest/__init__.py +++ b/nibabel/testing_pytest/__init__.py @@ -13,17 +13,12 @@ import sys import warnings from pkg_resources import resource_filename -from os.path import dirname, abspath, join as pjoin import unittest import numpy as np -from numpy.testing import assert_array_equal, assert_warns -from numpy.testing import dec -skipif = dec.skipif -slow = dec.slow +from numpy.testing import assert_array_equal -from ..deprecated import deprecate_with_version as _deprecate_with_version from .np_features import memmap_after_ufunc from .helpers import bytesio_filemap, bytesio_round_trip, assert_data_similar @@ -63,7 +58,7 @@ def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): a, b = np.broadcast_arrays(a, b) if match_nans: nans = np.isnan(a) - np.testing.assert_array_equal(nans, np.isnan(b)) + assert_array_equal(nans, np.isnan(b)) to_test = ~nans else: to_test = np.ones(a.shape, dtype=bool) @@ -199,20 +194,12 @@ class suppress_warnings(error_warnings): filter = 'ignore' -@_deprecate_with_version('catch_warn_reset is deprecated; use ' - 'nibabel.testing.clear_and_catch_warnings.', - since='2.1.0', until='3.0.0') -class catch_warn_reset(clear_and_catch_warnings): - pass - - EXTRA_SET = os.environ.get('NIPY_EXTRA_TESTS', '').split(',') def runif_extra_has(test_str): """Decorator checks to see if NIPY_EXTRA_TESTS env var contains test_str""" - return skipif(test_str not in EXTRA_SET, - "Skip {0} tests.".format(test_str)) + return unittest.skipUnless(test_str in EXTRA_SET, "Skip {0} tests.".format(test_str)) def assert_arr_dict_equal(dict1, dict2): From a223c071b59cd00dcf1cdd2044975746b9f8f74b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 7 Feb 2020 21:20:23 -0500 Subject: [PATCH 642/689] CI: Only ignore files that use fixtures --- .azure-pipelines/windows.yml | 58 +----------------------------------- .travis.yml | 58 +----------------------------------- 2 files changed, 2 insertions(+), 114 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 8de4ed3466..fcdd542223 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -41,73 +41,17 @@ jobs: cd for_testing cp ../.coveragerc . 
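The `-I` exclusions that remain in the `nosetests` invocation below are exactly the modules already converted to pytest idioms: nose cannot inject pytest fixtures, so it would call such tests with a missing argument and error out. A hedged illustration of the kind of construct involved (not an actual nibabel test):

    import pytest


    @pytest.fixture
    def sample_header():
        # pytest resolves this by matching the argument name below;
        # nose has no equivalent injection mechanism
        return {'Rows': 32, 'Columns': 64}


    def test_with_fixture(sample_header):
        assert sample_header['Rows'] == 32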
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel ^ - -I test_array_sequence ^ - -I test_tractogram ^ - -I test_api_validators ^ - -I test_arrayproxy ^ - -I test_arraywriters ^ - -I test_batteryrunners ^ - -I test_brikhead ^ - -I test_casting ^ - -I test_cifti2io_axes ^ - -I test_cifti2io_header ^ -I test_data ^ - -I test_deprecated ^ - -I test_deprecator ^ - -I test_dicomwrappers ^ - -I test_dft ^ - -I test_ecat ^ - -I test_ecat_data ^ - -I test_endiancodes ^ -I test_environment ^ -I test_euler ^ - -I test_filebasedimages ^ - -I test_filehandles ^ - -I test_fileholders ^ - -I test_filename_parser ^ - -I test_files_interface ^ - -I test_fileslice ^ - -I test_fileutils ^ - -I test_floating ^ - -I test_funcs ^ -I test_giftiio ^ - -I test_h5py_compat ^ - -I test_image_api ^ - -I test_image_load_save ^ - -I test_image_types ^ - -I test_imageclasses ^ - -I test_imageglobals ^ - -I test_io ^ - -I test_keywordonly ^ - -I test_loadsave ^ - -I test_minc1 ^ - -I test_minc2 ^ - -I test_minc2_data ^ - -I test_mriutils ^ -I test_netcdf ^ - -I test_nibabel_data ^ - -I test_nifti1 ^ - -I test_nifti2 ^ - -I test_openers ^ - -I test_optpkg ^ - -I test_orientations ^ - -I test_parrec ^ - -I test_parrec_data ^ -I test_pkg_info ^ - -I test_processing ^ - -I test_proxy_api ^ -I test_quaternions ^ - -I test_recoder ^ - -I test_remmovalschedule ^ - -I test_round_trip ^ - -I test_rstutils ^ -I test_scaling ^ - -I test_wrapstruct ^ - -I test_io ^ -I test_scripts ^ -I test_spaces ^ - -I test_testing ^ - -I test_wrapstruct + -I test_testing displayName: 'Nose tests' condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | diff --git a/.travis.yml b/.travis.yml index 711601764b..857867c712 100644 --- a/.travis.yml +++ b/.travis.yml @@ -134,73 +134,17 @@ script: cd for_testing cp ../.coveragerc . 
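Once the remaining modules are converted, pytest offers a declarative analogue to the `-I` flags in the nose invocation below: a `collect_ignore` list in a `conftest.py`. A sketch only — the paths here are hypothetical, not nibabel's actual layout:

    # conftest.py
    import sys

    # Files pytest should never collect (paths relative to this conftest)
    collect_ignore = ['benchmarks/bench_slow.py']

    if sys.platform == 'win32':
        # e.g. FUSE-dependent tests cannot run on Windows
        collect_ignore.append('nibabel/tests/test_dft.py')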
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \ - -I test_array_sequence \ - -I test_tractogram \ - -I test_api_validators \ - -I test_arrayproxy \ - -I test_arraywriters \ - -I test_batteryrunners \ - -I test_brikhead \ - -I test_casting \ - -I test_cifti2io_axes \ - -I test_cifti2io_header \ -I test_data \ - -I test_deprecated \ - -I test_deprecator \ - -I test_dicomwrappers \ - -I test_dft \ - -I test_ecat \ - -I test_ecat_data \ - -I test_endiancodes \ -I test_environment \ -I test_euler \ - -I test_filebasedimages \ - -I test_filehandles \ - -I test_fileholders \ - -I test_filename_parser \ - -I test_files_interface \ - -I test_fileslice \ - -I test_fileutils \ - -I test_floating \ - -I test_funcs \ -I test_giftiio \ - -I test_h5py_compat \ - -I test_image_api \ - -I test_image_load_save \ - -I test_image_types \ - -I test_imageclasses \ - -I test_imageglobals \ - -I test_io \ - -I test_keywordonly \ - -I test_loadsave \ - -I test_minc1 \ - -I test_minc2 \ - -I test_minc2_data \ - -I test_mriutils \ -I test_netcdf \ - -I test_nibabel_data \ - -I test_nifti1 \ - -I test_nifti2 \ - -I test_openers \ - -I test_optpkg \ - -I test_orientations \ - -I test_parrec \ - -I test_parrec_data \ -I test_pkg_info \ - -I test_processing \ - -I test_proxy_api \ -I test_quaternions \ - -I test_recoder \ - -I test_remmovalschedule \ - -I test_round_trip \ - -I test_rstutils \ -I test_scaling \ - -I test_wrapstruct \ - -I test_io \ -I test_scripts \ -I test_spaces \ - -I test_testing \ - -I test_wrapstruct + -I test_testing elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing From b2f40bdcaf5c81e8d89142ca784e4a2f2850f577 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 8 Feb 2020 22:41:05 -0500 Subject: [PATCH 643/689] TEST: De-indent tests, cleanup style --- nibabel/tests/test_filename_parser.py | 130 +++++++++++--------------- 1 file changed, 53 insertions(+), 77 deletions(-) diff --git a/nibabel/tests/test_filename_parser.py b/nibabel/tests/test_filename_parser.py index 22178a4349..b0abc6d608 100644 --- a/nibabel/tests/test_filename_parser.py +++ b/nibabel/tests/test_filename_parser.py @@ -18,83 +18,59 @@ def test_filenames(): types_exts = (('image', '.img'), ('header', '.hdr')) for t_fname in ('test.img', 'test.hdr', 'test', 'test.'): tfns = types_filenames(t_fname, types_exts) - assert (tfns == - {'image': 'test.img', - 'header': 'test.hdr'}) - # enforcing extensions raises an error for bad extension - with pytest.raises(TypesFilenamesError): - types_filenames('test.funny', types_exts) - # If not enforcing extensions, it does the best job it can, - # assuming the passed filename is for the first type (in this case - # 'image') - tfns = types_filenames('test.funny', types_exts, - enforce_extensions=False) - assert (tfns == - {'header': 'test.hdr', - 'image': 'test.funny'}) - # .gz and .bz2 suffixes to extensions, by default, are removed - # before extension checking etc, and then put back onto every - # returned filename. - tfns = types_filenames('test.img.gz', types_exts) - assert (tfns == - {'header': 'test.hdr.gz', - 'image': 'test.img.gz'}) - tfns = types_filenames('test.img.bz2', types_exts) - assert (tfns == - {'header': 'test.hdr.bz2', - 'image': 'test.img.bz2'}) - # of course, if we don't know about e.g. 
gz, and enforce_extensions
-        # is on, we get an errror
-        with pytest.raises(TypesFilenamesError):
-            types_filenames('test.img.gz', types_exts, ())
-        # if we don't know about .gz extension, and not enforcing, then we
-        # get something a bit odd
-        tfns = types_filenames('test.img.gz', types_exts,
-                               trailing_suffixes=(),
-                               enforce_extensions=False)
-        assert (tfns ==
-                {'header': 'test.img.hdr',
-                 'image': 'test.img.gz'})
-        # the suffixes we remove and replaces can be any suffixes.
-        tfns = types_filenames('test.img.bzr', types_exts, ('.bzr',))
-        assert (tfns ==
-                {'header': 'test.hdr.bzr',
-                 'image': 'test.img.bzr'})
-        # If we specifically pass the remove / replace suffixes, then we
-        # don't remove / replace the .gz and .bz2, unless they are passed
-        # specifically.
-        tfns = types_filenames('test.img.bzr', types_exts,
-                               trailing_suffixes=('.bzr',),
-                               enforce_extensions=False)
-        assert (tfns ==
-                {'header': 'test.hdr.bzr',
-                 'image': 'test.img.bzr'})
-        # but, just .gz or .bz2 as extension gives an error, if enforcing is on
-        with pytest.raises(TypesFilenamesError):
-            types_filenames('test.gz', types_exts)
-        with pytest.raises(TypesFilenamesError):
-            types_filenames('test.bz2', types_exts)
-        # if enforcing is off, it tries to work out what the other files
-        # should be assuming the passed filename is of the first input type
-        tfns = types_filenames('test.gz', types_exts,
-                               enforce_extensions=False)
-        assert (tfns ==
-                {'image': 'test.gz',
-                 'header': 'test.hdr.gz'})
-        # case (in)sensitivity, and effect of uppercase, lowercase
-        tfns = types_filenames('test.IMG', types_exts)
-        assert (tfns ==
-                {'image': 'test.IMG',
-                 'header': 'test.HDR'})
-        tfns = types_filenames('test.img',
-                               (('image', '.IMG'), ('header', '.HDR')))
-        assert (tfns ==
-                {'header': 'test.hdr',
-                 'image': 'test.img'})
-        tfns = types_filenames('test.IMG.Gz', types_exts)
-        assert (tfns ==
-                {'image': 'test.IMG.Gz',
-                 'header': 'test.HDR.Gz'})
+        assert tfns == {'image': 'test.img', 'header': 'test.hdr'}
+    # enforcing extensions raises an error for bad extension
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.funny', types_exts)
+    # If not enforcing extensions, it does the best job it can,
+    # assuming the passed filename is for the first type (in this case
+    # 'image')
+    tfns = types_filenames('test.funny', types_exts, enforce_extensions=False)
+    assert tfns == {'header': 'test.hdr', 'image': 'test.funny'}
+    # .gz and .bz2 suffixes to extensions, by default, are removed
+    # before extension checking etc, and then put back onto every
+    # returned filename.
+    tfns = types_filenames('test.img.gz', types_exts)
+    assert tfns == {'header': 'test.hdr.gz', 'image': 'test.img.gz'}
+    tfns = types_filenames('test.img.bz2', types_exts)
+    assert tfns == {'header': 'test.hdr.bz2', 'image': 'test.img.bz2'}
+    # of course, if we don't know about e.g. gz, and enforce_extensions
+    # is on, we get an error
+    with pytest.raises(TypesFilenamesError):
+        types_filenames('test.img.gz', types_exts, ())
+    # if we don't know about .gz extension, and not enforcing, then we
+    # get something a bit odd
+    tfns = types_filenames('test.img.gz', types_exts,
+                           trailing_suffixes=(),
+                           enforce_extensions=False)
+    assert tfns == {'header': 'test.img.hdr', 'image': 'test.img.gz'}
+    # the suffixes we remove and replace can be any suffixes. 
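The custom-suffix behaviour asserted just above and exercised next can be reproduced directly with nibabel's helper; a small sketch (the filenames are illustrative):

    from nibabel.filename_parser import types_filenames

    types_exts = (('image', '.img'), ('header', '.hdr'))
    # default trailing suffixes (.gz, .bz2) are stripped, matched, then restored
    print(types_filenames('subj.img.gz', types_exts))
    # -> {'image': 'subj.img.gz', 'header': 'subj.hdr.gz'}
    # any trailing suffix can be substituted for the defaults
    print(types_filenames('subj.img.bzr', types_exts, ('.bzr',)))
    # -> {'image': 'subj.img.bzr', 'header': 'subj.hdr.bzr'}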
+ tfns = types_filenames('test.img.bzr', types_exts, ('.bzr',)) + assert tfns == {'header': 'test.hdr.bzr', 'image': 'test.img.bzr'} + # If we specifically pass the remove / replace suffixes, then we + # don't remove / replace the .gz and .bz2, unless they are passed + # specifically. + tfns = types_filenames('test.img.bzr', types_exts, + trailing_suffixes=('.bzr',), + enforce_extensions=False) + assert tfns == {'header': 'test.hdr.bzr', 'image': 'test.img.bzr'} + # but, just .gz or .bz2 as extension gives an error, if enforcing is on + with pytest.raises(TypesFilenamesError): + types_filenames('test.gz', types_exts) + with pytest.raises(TypesFilenamesError): + types_filenames('test.bz2', types_exts) + # if enforcing is off, it tries to work out what the other files + # should be assuming the passed filename is of the first input type + tfns = types_filenames('test.gz', types_exts, + enforce_extensions=False) + assert tfns == {'image': 'test.gz', 'header': 'test.hdr.gz'} + # case (in)sensitivity, and effect of uppercase, lowercase + tfns = types_filenames('test.IMG', types_exts) + assert tfns == {'image': 'test.IMG', 'header': 'test.HDR'} + tfns = types_filenames('test.img', (('image', '.IMG'), ('header', '.HDR'))) + assert tfns == {'header': 'test.hdr', 'image': 'test.img'} + tfns = types_filenames('test.IMG.Gz', types_exts) + assert tfns == {'image': 'test.IMG.Gz', 'header': 'test.HDR.Gz'} def test_parse_filename(): From 8ed033cf1d7b9f6f63ae83bee8539571fd7dc087 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sat, 8 Feb 2020 23:07:13 -0500 Subject: [PATCH 644/689] TEST/STY: Cleaner line breaks and alignment for test_fileslice --- nibabel/tests/test_fileslice.py | 346 ++++++++++++-------------------- 1 file changed, 123 insertions(+), 223 deletions(-) diff --git a/nibabel/tests/test_fileslice.py b/nibabel/tests/test_fileslice.py index 07a2627910..21b1224d66 100644 --- a/nibabel/tests/test_fileslice.py +++ b/nibabel/tests/test_fileslice.py @@ -64,46 +64,33 @@ def test_canonical_slicers(): for slice1 in slicers: sliceobj = (slice0, slice1) assert canonical_slicers(sliceobj, shape) == sliceobj - assert (canonical_slicers(sliceobj, shape + (2, 3, 4)) == - sliceobj + (slice(None),) * 3) - assert (canonical_slicers(sliceobj * 3, shape * 3) == - sliceobj * 3) + assert canonical_slicers(sliceobj, shape + (2, 3, 4)) == sliceobj + (slice(None),) * 3 + assert canonical_slicers(sliceobj * 3, shape * 3) == sliceobj * 3 # Check None passes through - assert (canonical_slicers(sliceobj + (None,), shape) == - sliceobj + (None,)) - assert (canonical_slicers((None,) + sliceobj, shape) == - (None,) + sliceobj) + assert canonical_slicers(sliceobj + (None,), shape) == sliceobj + (None,) + assert canonical_slicers((None,) + sliceobj, shape) == (None,) + sliceobj assert (canonical_slicers((None,) + sliceobj + (None,), shape) == - (None,) + sliceobj + (None,)) + (None,) + sliceobj + (None,)) # Check Ellipsis - assert (canonical_slicers((Ellipsis,), shape) == - (slice(None), slice(None))) - assert (canonical_slicers((Ellipsis, None), shape) == - (slice(None), slice(None), None)) - assert (canonical_slicers((Ellipsis, 1), shape) == - (slice(None), 1)) - assert (canonical_slicers((1, Ellipsis), shape) == - (1, slice(None))) + assert canonical_slicers((Ellipsis,), shape) == (slice(None), slice(None)) + assert canonical_slicers((Ellipsis, None), shape) == (slice(None), slice(None), None) + assert canonical_slicers((Ellipsis, 1), shape) == (slice(None), 1) + assert canonical_slicers((1, Ellipsis), shape) == (1, 
slice(None)) # Ellipsis at end does nothing - assert (canonical_slicers((1, 1, Ellipsis), shape) == - (1, 1)) + assert canonical_slicers((1, 1, Ellipsis), shape) == (1, 1) assert (canonical_slicers((1, Ellipsis, 2), (10, 1, 2, 3, 11)) == - (1, slice(None), slice(None), slice(None), 2)) + (1, slice(None), slice(None), slice(None), 2)) with pytest.raises(ValueError): canonical_slicers((Ellipsis, 1, Ellipsis), (2, 3, 4, 5)) # Check full slices get expanded for slice0 in (slice(10), slice(0, 10), slice(0, 10, 1)): - assert (canonical_slicers((slice0, 1), shape) == - (slice(None), 1)) + assert canonical_slicers((slice0, 1), shape) == (slice(None), 1) for slice0 in (slice(10), slice(0, 10), slice(0, 10, 1)): - assert (canonical_slicers((slice0, 1), shape) == - (slice(None), 1)) - assert (canonical_slicers((1, slice0), shape) == - (1, slice(None))) + assert canonical_slicers((slice0, 1), shape) == (slice(None), 1) + assert canonical_slicers((1, slice0), shape) == (1, slice(None)) # Check ints etc get parsed through to tuples assert canonical_slicers(1, shape) == (1, slice(None)) - assert (canonical_slicers(slice(None), shape) == - (slice(None), slice(None))) + assert canonical_slicers(slice(None), shape) == (slice(None), slice(None)) # Check fancy indexing raises error with pytest.raises(ValueError): canonical_slicers((np.array(1), 1), shape) @@ -205,28 +192,18 @@ def test_fill_slicer(): assert fill_slicer(slice(1, 11, 2), 10) == slice(1, 10, 2) assert fill_slicer(slice(0, 11, 3), 10) == slice(0, 10, 3) assert fill_slicer(slice(1, 11, 3), 10) == slice(1, 10, 3) - assert (fill_slicer(slice(None, None, -1), 10) == - slice(9, None, -1)) - assert (fill_slicer(slice(11, None, -1), 10) == - slice(9, None, -1)) - assert (fill_slicer(slice(None, 1, -1), 10) == - slice(9, 1, -1)) - assert (fill_slicer(slice(None, None, -2), 10) == - slice(9, None, -2)) - assert (fill_slicer(slice(None, None, -3), 10) == - slice(9, None, -3)) - assert (fill_slicer(slice(None, 0, -3), 10) == - slice(9, 0, -3)) + assert fill_slicer(slice(None, None, -1), 10) == slice(9, None, -1) + assert fill_slicer(slice(11, None, -1), 10) == slice(9, None, -1) + assert fill_slicer(slice(None, 1, -1), 10) == slice(9, 1, -1) + assert fill_slicer(slice(None, None, -2), 10) == slice(9, None, -2) + assert fill_slicer(slice(None, None, -3), 10) == slice(9, None, -3) + assert fill_slicer(slice(None, 0, -3), 10) == slice(9, 0, -3) # Start, end are always taken to be relative if negative - assert (fill_slicer(slice(None, -4, -1), 10) == - slice(9, 6, -1)) - assert (fill_slicer(slice(-4, -2, 1), 10) == - slice(6, 8, 1)) + assert fill_slicer(slice(None, -4, -1), 10) == slice(9, 6, -1) + assert fill_slicer(slice(-4, -2, 1), 10) == slice(6, 8, 1) # start after stop - assert (fill_slicer(slice(3, 2, 1), 10) == - slice(3, 2, 1)) - assert (fill_slicer(slice(2, 3, -1), 10) == - slice(2, 3, -1)) + assert fill_slicer(slice(3, 2, 1), 10) == slice(3, 2, 1) + assert fill_slicer(slice(2, 3, -1), 10) == slice(2, 3, -1) def test__positive_slice(): @@ -247,37 +224,21 @@ def test_threshold_heuristic(): assert threshold_heuristic(1, 9, 2, skip_thresh=16) == 'full' assert threshold_heuristic(1, 9, 2, skip_thresh=15) is None # full slice, smallest step size - assert (threshold_heuristic( - slice(0, 9, 1), 9, 2, skip_thresh=2) == - 'full') + assert threshold_heuristic(slice(0, 9, 1), 9, 2, skip_thresh=2) == 'full' # Dropping skip thresh below step size gives None - assert (threshold_heuristic( - slice(0, 9, 1), 9, 2, skip_thresh=1) == - None) + assert 
threshold_heuristic(slice(0, 9, 1), 9, 2, skip_thresh=1) is None
     # As does increasing step size
-    assert (threshold_heuristic(
-        slice(0, 9, 2), 9, 2, skip_thresh=3) ==
-        None)
+    assert threshold_heuristic(slice(0, 9, 2), 9, 2, skip_thresh=3) is None
     # Negative step size same as positive
-    assert (threshold_heuristic(
-        slice(9, None, -1), 9, 2, skip_thresh=2) ==
-        'full')
+    assert threshold_heuristic(slice(9, None, -1), 9, 2, skip_thresh=2) == 'full'
     # Add a gap between start and end. Now contiguous because of step size
-    assert (threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=2) ==
-        'contiguous')
+    assert threshold_heuristic(slice(2, 9, 1), 9, 2, skip_thresh=2) == 'contiguous'
     # To not-contiguous, even with step size 1
-    assert (threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=1) ==
-        None)
+    assert threshold_heuristic(slice(2, 9, 1), 9, 2, skip_thresh=1) is None
     # Back to full when skip covers gap
-    assert (threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=4) ==
-        'full')
+    assert threshold_heuristic(slice(2, 9, 1), 9, 2, skip_thresh=4) == 'full'
     # Until it doesn't cover the gap
-    assert (threshold_heuristic(
-        slice(2, 9, 1), 9, 2, skip_thresh=3) ==
-        'contiguous')
+    assert threshold_heuristic(slice(2, 9, 1), 9, 2, skip_thresh=3) == 'contiguous'


 # Some dummy heuristics for optimize_slicer
@@ -309,233 +270,175 @@ def test_optimize_slicer():
     # following tests not affected by all_full or optimization
     # full - always passes through
     assert (
         optimize_slicer(slice(None), 10, all_full, is_slowest, 4, heuristic) ==
         (slice(None), slice(None)))
     # Even if full specified with explicit values
     assert (
         optimize_slicer(slice(10), 10, all_full, is_slowest, 4, heuristic) ==
         (slice(None), slice(None)))
     assert (
         optimize_slicer(slice(0, 10), 10, all_full, is_slowest, 4, heuristic) ==
         (slice(None), slice(None)))
     assert (
         optimize_slicer(slice(0, 10, 1), 10, all_full, is_slowest, 4, heuristic) ==
         (slice(None), slice(None)))
     # Reversed full is still full, but with reversed post_slice
     assert (
         optimize_slicer(
             slice(None, None, -1), 10, all_full, is_slowest, 4, heuristic) ==
         (slice(None), slice(None, None, -1)))
     # Contiguous is contiguous unless heuristic kicks in, in which case it may
     # be 'full'
-    assert (
-        optimize_slicer(slice(9), 10, False, False, 4, _always) ==
-        (slice(0, 9, 1), slice(None)))
-    assert (
-        optimize_slicer(slice(9), 10, True, False, 4, _always) ==
-        (slice(None), slice(0, 9, 1)))
+    assert optimize_slicer(slice(9), 10, False, False, 4, _always) == (slice(0, 9, 1), slice(None))
+    assert optimize_slicer(slice(9), 10, True, False, 4, _always) == (slice(None), slice(0, 9, 1))
     # Unless this is the slowest dimension, and all_true is True, in which case
     # we don't update to full
-    assert (
-        optimize_slicer(slice(9), 10, True, True, 4, _always) ==
-        (slice(0, 9, 1), slice(None)))
+    assert optimize_slicer(slice(9), 10, True, True, 4, _always) == (slice(0, 9, 1), slice(None))
     # Nor if the heuristic won't update
-    assert (
-        optimize_slicer(slice(9), 10, True, False, 4, _never) ==
-        (slice(0, 9, 1), slice(None)))
-    assert (
-        optimize_slicer(slice(1, 10), 10, True, False, 4, _never) ==
-        (slice(1, 10, 1), slice(None)))
+    assert optimize_slicer(slice(9), 10, 
True, False, 4, _never) == (slice(0, 9, 1), slice(None)) + assert (optimize_slicer(slice(1, 10), 10, True, False, 4, _never) == + (slice(1, 10, 1), slice(None))) # Reversed contiguous still contiguous - assert ( - optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) == - (slice(0, 9, 1), slice(None, None, -1))) - assert ( - optimize_slicer(slice(8, None, -1), 10, True, False, 4, _always) == - (slice(None), slice(8, None, -1))) - assert ( - optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) == - (slice(0, 9, 1), slice(None, None, -1))) - assert ( - optimize_slicer(slice(9, 0, -1), 10, False, False, 4, _never) == - (slice(1, 10, 1), slice(None, None, -1))) + assert (optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) == + (slice(0, 9, 1), slice(None, None, -1))) + assert (optimize_slicer(slice(8, None, -1), 10, True, False, 4, _always) == + (slice(None), slice(8, None, -1))) + assert (optimize_slicer(slice(8, None, -1), 10, False, False, 4, _never) == + (slice(0, 9, 1), slice(None, None, -1))) + assert (optimize_slicer(slice(9, 0, -1), 10, False, False, 4, _never) == + (slice(1, 10, 1), slice(None, None, -1))) # Non-contiguous - assert ( - optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _never) == - (slice(0, 10, 2), slice(None))) + assert (optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _never) == + (slice(0, 10, 2), slice(None))) # all_full triggers optimization, but optimization does nothing - assert ( - optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _never) == - (slice(0, 10, 2), slice(None))) + assert (optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _never) == + (slice(0, 10, 2), slice(None))) # all_full triggers optimization, optimization does something - assert ( - optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) == - (slice(None), slice(0, 10, 2))) + assert (optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) == + (slice(None), slice(0, 10, 2))) # all_full disables optimization, optimization does something - assert ( - optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _always) == - (slice(0, 10, 2), slice(None))) + assert (optimize_slicer(slice(0, 10, 2), 10, False, False, 4, _always) == + (slice(0, 10, 2), slice(None))) # Non contiguous, reversed - assert ( - optimize_slicer(slice(10, None, -2), 10, False, False, 4, _never) == - (slice(1, 10, 2), slice(None, None, -1))) - assert ( - optimize_slicer(slice(10, None, -2), 10, True, False, 4, _always) == - (slice(None), slice(9, None, -2))) + assert (optimize_slicer(slice(10, None, -2), 10, False, False, 4, _never) == + (slice(1, 10, 2), slice(None, None, -1))) + assert (optimize_slicer(slice(10, None, -2), 10, True, False, 4, _always) == + (slice(None), slice(9, None, -2))) # Short non-contiguous - assert ( - optimize_slicer(slice(2, 8, 2), 10, False, False, 4, _never) == - (slice(2, 8, 2), slice(None))) + assert (optimize_slicer(slice(2, 8, 2), 10, False, False, 4, _never) == + (slice(2, 8, 2), slice(None))) # with partial read - assert ( - optimize_slicer(slice(2, 8, 2), 10, True, False, 4, _partial) == - (slice(2, 8, 1), slice(None, None, 2))) + assert (optimize_slicer(slice(2, 8, 2), 10, True, False, 4, _partial) == + (slice(2, 8, 1), slice(None, None, 2))) # If this is the slowest changing dimension, heuristic can upgrade None to # contiguous, but not (None, contiguous) to full # we've done this one already - assert optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) \ - == (slice(None), slice(0, 10, 2)) + assert 
(optimize_slicer(slice(0, 10, 2), 10, True, False, 4, _always) == + (slice(None), slice(0, 10, 2))) # if slowest, just upgrade to contiguous - assert ( - optimize_slicer(slice(0, 10, 2), 10, True, True, 4, _always) == - (slice(0, 10, 1), slice(None, None, 2))) + assert (optimize_slicer(slice(0, 10, 2), 10, True, True, 4, _always) == + (slice(0, 10, 1), slice(None, None, 2))) # contiguous does not upgrade to full - assert ( - optimize_slicer(slice(9), 10, True, True, 4, _always) == - (slice(0, 9, 1), slice(None))) + assert optimize_slicer(slice(9), 10, True, True, 4, _always) == (slice(0, 9, 1), slice(None)) # integer - assert ( - optimize_slicer(0, 10, True, False, 4, _never) == - (0, 'dropped')) + assert optimize_slicer(0, 10, True, False, 4, _never) == (0, 'dropped') # can be negative - assert ( - optimize_slicer(-1, 10, True, False, 4, _never) == - (9, 'dropped')) + assert optimize_slicer(-1, 10, True, False, 4, _never) == (9, 'dropped') # or float - assert ( - optimize_slicer(0.9, 10, True, False, 4, _never) == - (0, 'dropped')) + assert optimize_slicer(0.9, 10, True, False, 4, _never) == (0, 'dropped') # should never get 'contiguous' with pytest.raises(ValueError): optimize_slicer(0, 10, True, False, 4, _partial) # full can be forced with heuristic - assert ( - optimize_slicer(0, 10, True, False, 4, _always) == - (slice(None), 0)) + assert optimize_slicer(0, 10, True, False, 4, _always) == (slice(None), 0) # but disabled for slowest changing dimension - assert ( - optimize_slicer(0, 10, True, True, 4, _always) == - (0, 'dropped')) + assert optimize_slicer(0, 10, True, True, 4, _always) == (0, 'dropped') def test_optimize_read_slicers(): # Test function to optimize read slicers - assert (optimize_read_slicers((1,), (10,), 4, _never) == - ((1,), ())) + assert optimize_read_slicers((1,), (10,), 4, _never) == ((1,), ()) assert (optimize_read_slicers((slice(None),), (10,), 4, _never) == - ((slice(None),), (slice(None),))) + ((slice(None),), (slice(None),))) assert (optimize_read_slicers((slice(9),), (10,), 4, _never) == - ((slice(0, 9, 1),), (slice(None),))) + ((slice(0, 9, 1),), (slice(None),))) # optimize cannot update a continuous to a full if last assert (optimize_read_slicers((slice(9),), (10,), 4, _always) == - ((slice(0, 9, 1),), (slice(None),))) + ((slice(0, 9, 1),), (slice(None),))) # optimize can update non-contiguous to continuous even if last # not optimizing assert (optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _never) == - ((slice(0, 9, 2),), (slice(None),))) + ((slice(0, 9, 2),), (slice(None),))) # optimizing assert (optimize_read_slicers((slice(0, 9, 2),), (10,), 4, _always) == - ((slice(0, 9, 1),), (slice(None, None, 2),))) + ((slice(0, 9, 1),), (slice(None, None, 2),))) # Optimize does nothing for integer when last - assert (optimize_read_slicers((1,), (10,), 4, _always) == - ((1,), ())) + assert optimize_read_slicers((1,), (10,), 4, _always) == ((1,), ()) # 2D - assert (optimize_read_slicers( - (slice(None), slice(None)), (10, 6), 4, _never) == - ((slice(None), slice(None)), (slice(None), slice(None)))) + assert (optimize_read_slicers((slice(None), slice(None)), (10, 6), 4, _never) == + ((slice(None), slice(None)), (slice(None), slice(None)))) assert (optimize_read_slicers((slice(None), 1), (10, 6), 4, _never) == - ((slice(None), 1), (slice(None),))) + ((slice(None), 1), (slice(None),))) assert (optimize_read_slicers((1, slice(None)), (10, 6), 4, _never) == - ((1, slice(None)), (slice(None),))) + ((1, slice(None)), (slice(None),))) # Not optimizing a partial slice - 
assert (optimize_read_slicers( - (slice(9), slice(None)), (10, 6), 4, _never) == - ((slice(0, 9, 1), slice(None)), (slice(None), slice(None)))) + assert (optimize_read_slicers((slice(9), slice(None)), (10, 6), 4, _never) == + ((slice(0, 9, 1), slice(None)), (slice(None), slice(None)))) # Optimizing a partial slice - assert (optimize_read_slicers( - (slice(9), slice(None)), (10, 6), 4, _always) == - ((slice(None), slice(None)), (slice(0, 9, 1), slice(None)))) + assert (optimize_read_slicers((slice(9), slice(None)), (10, 6), 4, _always) == + ((slice(None), slice(None)), (slice(0, 9, 1), slice(None)))) # Optimize cannot update a continuous to a full if last - assert (optimize_read_slicers( - (slice(None), slice(5)), (10, 6), 4, _always) == - ((slice(None), slice(0, 5, 1)), (slice(None), slice(None)))) + assert (optimize_read_slicers((slice(None), slice(5)), (10, 6), 4, _always) == + ((slice(None), slice(0, 5, 1)), (slice(None), slice(None)))) # optimize can update non-contiguous to full if not last # not optimizing - assert (optimize_read_slicers( - (slice(0, 9, 3), slice(None)), (10, 6), 4, _never) == - ((slice(0, 9, 3), slice(None)), (slice(None), slice(None)))) + assert (optimize_read_slicers((slice(0, 9, 3), slice(None)), (10, 6), 4, _never) == + ((slice(0, 9, 3), slice(None)), (slice(None), slice(None)))) # optimizing full - assert (optimize_read_slicers( - (slice(0, 9, 3), slice(None)), (10, 6), 4, _always) == - ((slice(None), slice(None)), (slice(0, 9, 3), slice(None)))) + assert (optimize_read_slicers((slice(0, 9, 3), slice(None)), (10, 6), 4, _always) == + ((slice(None), slice(None)), (slice(0, 9, 3), slice(None)))) # optimizing partial - assert (optimize_read_slicers( - (slice(0, 9, 3), slice(None)), (10, 6), 4, _partial) == - ((slice(0, 9, 1), slice(None)), (slice(None, None, 3), slice(None)))) + assert (optimize_read_slicers((slice(0, 9, 3), slice(None)), (10, 6), 4, _partial) == + ((slice(0, 9, 1), slice(None)), (slice(None, None, 3), slice(None)))) # optimize can update non-contiguous to continuous even if last # not optimizing - assert (optimize_read_slicers( - (slice(None), slice(0, 5, 2)), (10, 6), 4, _never) == - ((slice(None), slice(0, 5, 2)), (slice(None), slice(None)))) + assert (optimize_read_slicers((slice(None), slice(0, 5, 2)), (10, 6), 4, _never) == + ((slice(None), slice(0, 5, 2)), (slice(None), slice(None)))) # optimizing - assert (optimize_read_slicers( - (slice(None), slice(0, 5, 2),), (10, 6), 4, _always) == - ((slice(None), slice(0, 5, 1)), (slice(None), slice(None, None, 2)))) + assert (optimize_read_slicers((slice(None), slice(0, 5, 2),), (10, 6), 4, _always) == + ((slice(None), slice(0, 5, 1)), (slice(None), slice(None, None, 2)))) # Optimize does nothing for integer when last - assert (optimize_read_slicers( - (slice(None), 1), (10, 6), 4, _always) == - ((slice(None), 1), (slice(None),))) + assert (optimize_read_slicers((slice(None), 1), (10, 6), 4, _always) == + ((slice(None), 1), (slice(None),))) # Check gap threshold with 3D _depends0 = partial(threshold_heuristic, skip_thresh=10 * 4 - 1) _depends1 = partial(threshold_heuristic, skip_thresh=10 * 4) assert (optimize_read_slicers( (slice(9), slice(None), slice(None)), (10, 6, 2), 4, _depends0) == - ((slice(None), slice(None), slice(None)), - (slice(0, 9, 1), slice(None), slice(None)))) + ((slice(None), slice(None), slice(None)), (slice(0, 9, 1), slice(None), slice(None)))) assert (optimize_read_slicers( (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends0) == - ((slice(None), slice(0, 5, 1), 
slice(None)), - (slice(None), slice(None), slice(None)))) + ((slice(None), slice(0, 5, 1), slice(None)), (slice(None), slice(None), slice(None)))) assert (optimize_read_slicers( (slice(None), slice(5), slice(None)), (10, 6, 2), 4, _depends1) == - ((slice(None), slice(None), slice(None)), - (slice(None), slice(0, 5, 1), slice(None)))) + ((slice(None), slice(None), slice(None)), (slice(None), slice(0, 5, 1), slice(None)))) # Check longs as integer slices sn = slice(None) - assert (optimize_read_slicers( - (1, 2, 3), (2, 3, 4), 4, _always) == - ((sn, sn, 3), (1, 2))) + assert optimize_read_slicers((1, 2, 3), (2, 3, 4), 4, _always) == ((sn, sn, 3), (1, 2)) def test_slicers2segments(): # Test function to construct segments from slice objects - assert (slicers2segments((0,), (10,), 7, 4) == - [[7, 4]]) - assert (slicers2segments((0, 1), (10, 6), 7, 4) == - [[7 + 10 * 4, 4]]) - assert (slicers2segments((0, 1, 2), (10, 6, 4), 7, 4) == - [[7 + 10 * 4 + 10 * 6 * 2 * 4, 4]]) - assert (slicers2segments((slice(None),), (10,), 7, 4) == - [[7, 10 * 4]]) + assert slicers2segments((0,), (10,), 7, 4) == [[7, 4]] + assert slicers2segments((0, 1), (10, 6), 7, 4) == [[7 + 10 * 4, 4]] + assert slicers2segments((0, 1, 2), (10, 6, 4), 7, 4) == [[7 + 10 * 4 + 10 * 6 * 2 * 4, 4]] + assert slicers2segments((slice(None),), (10,), 7, 4) == [[7, 10 * 4]] assert (slicers2segments((0, slice(None)), (10, 6), 7, 4) == - [[7 + 10 * 4 * i, 4] for i in range(6)]) - assert (slicers2segments((slice(None), 0), (10, 6), 7, 4) == - [[7, 10 * 4]]) - assert (slicers2segments((slice(None), slice(None)), (10, 6), 7, 4) == - [[7, 10 * 6 * 4]]) - assert (slicers2segments( - (slice(None), slice(None), 2), (10, 6, 4), 7, 4) == - [[7 + 10 * 6 * 2 * 4, 10 * 6 * 4]]) + [[7 + 10 * 4 * i, 4] for i in range(6)]) + assert slicers2segments((slice(None), 0), (10, 6), 7, 4) == [[7, 10 * 4]] + assert slicers2segments((slice(None), slice(None)), (10, 6), 7, 4) == [[7, 10 * 6 * 4]] + assert (slicers2segments((slice(None), slice(None), 2), (10, 6, 4), 7, 4) == + [[7 + 10 * 6 * 2 * 4, 10 * 6 * 4]]) def test_calc_slicedefs(): @@ -623,8 +526,7 @@ def test_predict_shape(): for i in range(n_dim): slicers_list.append(_slices_for_len(shape[i])) for sliceobj in product(*slicers_list): - assert (predict_shape(sliceobj, shape) == - arr[sliceobj].shape) + assert predict_shape(sliceobj, shape) == arr[sliceobj].shape # Try some Nones and ellipses assert predict_shape((Ellipsis,), (2, 3)) == (2, 3) assert predict_shape((Ellipsis, 1), (2, 3)) == (2,) @@ -743,10 +645,8 @@ def runtest(): assert numpassed[0] == len(threads) -def _check_slicer(sliceobj, arr, fobj, offset, order, - heuristic=threshold_heuristic): - new_slice = fileslice(fobj, sliceobj, arr.shape, arr.dtype, offset, order, - heuristic) +def _check_slicer(sliceobj, arr, fobj, offset, order, heuristic=threshold_heuristic): + new_slice = fileslice(fobj, sliceobj, arr.shape, arr.dtype, offset, order, heuristic) assert_array_equal(arr[sliceobj], new_slice) From 6be3e94dc488005dc3010f0b34354a144e260fce Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 9 Feb 2020 16:17:26 -0500 Subject: [PATCH 645/689] TEST/STY: Alignment in fileutils, floating tests --- nibabel/tests/test_fileutils.py | 6 ++---- nibabel/tests/test_floating.py | 13 ++++++------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/nibabel/tests/test_fileutils.py b/nibabel/tests/test_fileutils.py index edc8384d4d..ffd7d91b6a 100644 --- a/nibabel/tests/test_fileutils.py +++ b/nibabel/tests/test_fileutils.py @@ -36,8 +36,7 @@ def 
test_read_zt_byte_strings(): # manually rewind fread.seek(0) # test readout of two strings - assert (read_zt_byte_strings(fread, 2) == - [b'test.fmr', b'test.prt']) + assert read_zt_byte_strings(fread, 2) == [b'test.fmr', b'test.prt'] assert fread.tell() == 18 # test readout of more strings than present fread.seek(0) @@ -48,6 +47,5 @@ def test_read_zt_byte_strings(): read_zt_byte_strings(fread, 2) # Try with a small bufsize fread.seek(0) - assert (read_zt_byte_strings(fread, 2, 4) == - [b'test.fmr', b'test.prt']) + assert read_zt_byte_strings(fread, 2, 4) == [b'test.fmr', b'test.prt'] fread.close() diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index 94d3a396b2..da5f2b6e2b 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -34,9 +34,9 @@ def test_type_info(): info = np.iinfo(dtt) infod = type_info(dtt) assert dict(min=info.min, max=info.max, - nexp=None, nmant=None, - minexp=None, maxexp=None, - width=np.dtype(dtt).itemsize) == infod + nexp=None, nmant=None, + minexp=None, maxexp=None, + width=np.dtype(dtt).itemsize) == infod assert infod['min'].dtype.type == dtt assert infod['max'].dtype.type == dtt for dtt in IEEE_floats + [np.complex64, np.complex64]: @@ -286,7 +286,6 @@ def test_usable_binary128(): yes = have_binary128() with np.errstate(over='ignore'): exp_test = np.longdouble(2) ** 16383 - assert (yes == - (exp_test.dtype.itemsize == 16 and - np.isfinite(exp_test) and - _check_nmant(np.longdouble, 112))) + assert yes == (exp_test.dtype.itemsize == 16 and + np.isfinite(exp_test) and + _check_nmant(np.longdouble, 112)) From 84df0426796f8d796d0f0d19d0476473d78f8cb3 Mon Sep 17 00:00:00 2001 From: Guidotti Roberto Date: Mon, 10 Feb 2020 14:43:59 +0200 Subject: [PATCH 646/689] TEST: cmdline module to pytest --- nibabel/cmdline/tests/test_parrec2nii.py | 5 +- nibabel/cmdline/tests/test_utils.py | 61 ++++++++++++------------ 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/nibabel/cmdline/tests/test_parrec2nii.py b/nibabel/cmdline/tests/test_parrec2nii.py index 9fb556e34d..c41679d84d 100644 --- a/nibabel/cmdline/tests/test_parrec2nii.py +++ b/nibabel/cmdline/tests/test_parrec2nii.py @@ -9,7 +9,6 @@ from nibabel.cmdline import parrec2nii from unittest.mock import Mock, MagicMock, patch -from nose.tools import assert_true from numpy.testing import (assert_almost_equal, assert_array_equal) from nibabel.tests.test_parrec import EG_PAR, VARY_PAR @@ -89,8 +88,8 @@ def test_parrec2nii_save_load_qform_code(*args): for fname in [EG_PAR, VARY_PAR]: parrec2nii.proc_file(fname, opts) outfname = join(pth, basename(fname)).replace('.PAR', '.nii') - assert_true(isfile(outfname)) + assert isfile(outfname) img = nibabel.load(outfname) assert_almost_equal(img.affine, PAR_AFFINE, 4) - assert_array_equal(img.header['qform_code'], 1) + assert img.header['qform_code'] == 1 assert_array_equal(img.header['sform_code'], 1) diff --git a/nibabel/cmdline/tests/test_utils.py b/nibabel/cmdline/tests/test_utils.py index 199eea5d41..eb864c62c0 100644 --- a/nibabel/cmdline/tests/test_utils.py +++ b/nibabel/cmdline/tests/test_utils.py @@ -5,9 +5,10 @@ Test running scripts """ -from nose.tools import assert_equal from numpy.testing import assert_raises +import pytest + import nibabel as nib import numpy as np from nibabel.cmdline.utils import * @@ -19,18 +20,18 @@ def test_table2string(): - assert_equal(table2string([["A", "B", "C", "D"], ["E", "F", "G", "H"]]), "A B C D\nE F G H\n") - assert_equal(table2string([["Let's", "Make", "Tests", "And"], 
["Have", "Lots", "Of", "Fun"], - ["With", "Python", "Guys", "!"]]), "Let's Make Tests And\n Have Lots Of Fun"+ - "\n With Python Guys !\n") + assert table2string([["A", "B", "C", "D"], ["E", "F", "G", "H"]]) == "A B C D\nE F G H\n" + assert table2string([["Let's", "Make", "Tests", "And"], ["Have", "Lots", "Of", "Fun"], + ["With", "Python", "Guys", "!"]]) == "Let's Make Tests And\n Have Lots Of Fun"+ \ + "\n With Python Guys !\n" def test_ap(): - assert_equal(ap([1, 2], "%2d"), " 1, 2") - assert_equal(ap([1, 2], "%3d"), " 1, 2") - assert_equal(ap([1, 2], "%-2d"), "1 , 2 ") - assert_equal(ap([1, 2], "%d", "+"), "1+2") - assert_equal(ap([1, 2, 3], "%d", "-"), "1-2-3") + assert ap([1, 2], "%2d") == " 1, 2" + assert ap([1, 2], "%3d") == " 1, 2" + assert ap([1, 2], "%-2d") == "1 , 2 " + assert ap([1, 2], "%d", "+") == "1+2" + assert ap([1, 2, 3], "%d", "-") == "1-2-3" def test_safe_get(): @@ -44,8 +45,8 @@ def get_test(self): test = TestObject() test.test = 2 - assert_equal(safe_get(test, "test"), 2) - assert_equal(safe_get(test, "failtest"), "-") + assert safe_get(test, "test") == 2 + assert safe_get(test, "failtest") == "-" def test_get_headers_diff(): @@ -107,14 +108,14 @@ def test_display_diff(): " " \ "\n" - assert_equal(display_diff(bogus_names, dict_values), expected_output) + assert display_diff(bogus_names, dict_values) == expected_output def test_get_data_diff(): # testing for identical files specifically as md5 may vary by computer test_names = [pjoin(data_path, f) for f in ('standard.nii.gz', 'standard.nii.gz')] - assert_equal(get_data_hash_diff(test_names), []) + assert get_data_hash_diff(test_names) == [] # testing the maximum relative and absolute differences' different use cases test_array = np.arange(16).reshape(4, 4) @@ -124,37 +125,37 @@ def test_get_data_diff(): test_array_5 = np.arange(64).reshape(8, 8) # same shape, 2 files - assert_equal(get_data_diff([test_array, test_array_2]), - OrderedDict([('DATA(diff 1:)', [None, OrderedDict([('abs', 1), ('rel', 2.0)])])])) + assert get_data_diff([test_array, test_array_2]) == \ + OrderedDict([('DATA(diff 1:)', [None, OrderedDict([('abs', 1), ('rel', 2.0)])])]) # same shape, 3 files - assert_equal(get_data_diff([test_array, test_array_2, test_array_3]), + assert get_data_diff([test_array, test_array_2, test_array_3]) == \ OrderedDict([('DATA(diff 1:)', [None, OrderedDict([('abs', 1), ('rel', 2.0)]), OrderedDict([('abs', 2), ('rel', 2.0)])]), ('DATA(diff 2:)', [None, None, - OrderedDict([('abs', 1), ('rel', 0.66666666666666663)])])])) + OrderedDict([('abs', 1), ('rel', 0.66666666666666663)])])]) # same shape, 2 files, modified maximum abs/rel - assert_equal(get_data_diff([test_array, test_array_2], max_abs=2, max_rel=2), OrderedDict()) + assert get_data_diff([test_array, test_array_2], max_abs=2, max_rel=2) == OrderedDict() # different shape, 2 files - assert_equal(get_data_diff([test_array_2, test_array_4]), - OrderedDict([('DATA(diff 1:)', [None, {'CMP': 'incompat'}])])) + assert get_data_diff([test_array_2, test_array_4]) == \ + OrderedDict([('DATA(diff 1:)', [None, {'CMP': 'incompat'}])]) # different shape, 3 files - assert_equal(get_data_diff([test_array_4, test_array_5, test_array_2]), + assert get_data_diff([test_array_4, test_array_5, test_array_2]) == \ OrderedDict([('DATA(diff 1:)', [None, {'CMP': 'incompat'}, {'CMP': 'incompat'}]), - ('DATA(diff 2:)', [None, None, {'CMP': 'incompat'}])])) + ('DATA(diff 2:)', [None, None, {'CMP': 'incompat'}])]) test_return = get_data_diff([test_array, test_array_2], dtype=np.float32) - 
assert_equal(type(test_return['DATA(diff 1:)'][1]['abs']), np.float32) - assert_equal(type(test_return['DATA(diff 1:)'][1]['rel']), np.float32) + assert type(test_return['DATA(diff 1:)'][1]['abs']) is np.float32 + assert type(test_return['DATA(diff 1:)'][1]['rel']) is np.float32 test_return_2 = get_data_diff([test_array, test_array_2, test_array_3]) - assert_equal(type(test_return_2['DATA(diff 1:)'][1]['abs']), np.float64) - assert_equal(type(test_return_2['DATA(diff 1:)'][1]['rel']), np.float64) - assert_equal(type(test_return_2['DATA(diff 2:)'][2]['abs']), np.float64) - assert_equal(type(test_return_2['DATA(diff 2:)'][2]['rel']), np.float64) + assert type(test_return_2['DATA(diff 1:)'][1]['abs']) is np.float64 + assert type(test_return_2['DATA(diff 1:)'][1]['rel']) is np.float64 + assert type(test_return_2['DATA(diff 2:)'][2]['abs']) is np.float64 + assert type(test_return_2['DATA(diff 2:)'][2]['rel']) is np.float64 def test_main(): @@ -201,4 +202,4 @@ def test_main(): test_names_2 = [pjoin(data_path, f) for f in ('standard.nii.gz', 'standard.nii.gz')] with assert_raises(SystemExit): - assert_equal(main(test_names_2, StringIO()), "These files are identical.") + assert main(test_names_2, StringIO()) == "These files are identical." From 103db6ba2caeb9232d987d8974d309619e3a7fbb Mon Sep 17 00:00:00 2001 From: Guidotti Roberto Date: Mon, 10 Feb 2020 14:44:27 +0200 Subject: [PATCH 647/689] TEST: cifti2 tests to pytest --- nibabel/cifti2/tests/test_axes.py | 110 +++--- nibabel/cifti2/tests/test_cifti2.py | 371 +++++++++++-------- nibabel/cifti2/tests/test_cifti2io_header.py | 189 +++++----- nibabel/cifti2/tests/test_new_cifti2.py | 182 +++++---- 4 files changed, 454 insertions(+), 398 deletions(-) diff --git a/nibabel/cifti2/tests/test_axes.py b/nibabel/cifti2/tests/test_axes.py index 56457187a2..3f6cb3a1a4 100644 --- a/nibabel/cifti2/tests/test_axes.py +++ b/nibabel/cifti2/tests/test_axes.py @@ -1,5 +1,5 @@ import numpy as np -from nose.tools import assert_raises +import pytest from .test_cifti2io_axes import check_rewrite import nibabel.cifti2.cifti2_axes as axes from copy import deepcopy @@ -157,18 +157,18 @@ def test_brain_models(): # break brain model bmt.affine = np.eye(4) - with assert_raises(ValueError): + with pytest.raises(ValueError): bmt.affine = np.eye(3) - with assert_raises(ValueError): + with pytest.raises(ValueError): bmt.affine = np.eye(4).flatten() bmt.volume_shape = (5, 3, 1) - with assert_raises(ValueError): + with pytest.raises(ValueError): bmt.volume_shape = (5., 3, 1) - with assert_raises(ValueError): + with pytest.raises(ValueError): bmt.volume_shape = (5, 3, 1, 4) - with assert_raises(IndexError): + with pytest.raises(IndexError): bmt['thalamus_left'] # Test the constructor @@ -176,22 +176,22 @@ def test_brain_models(): assert np.all(bm_vox.name == ['CIFTI_STRUCTURE_THALAMUS_LEFT'] * 5) assert np.array_equal(bm_vox.vertex, np.full(5, -1)) assert np.array_equal(bm_vox.voxel, np.full((5, 3), 1)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # no volume shape axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # no affine axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), volume_shape=(2, 3, 4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # incorrect name axes.BrainModelAxis('random_name', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) - with assert_raises(ValueError): + with 
pytest.raises(ValueError): # negative voxel indices axes.BrainModelAxis('thalamus_left', voxel=-np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # no voxels or vertices axes.BrainModelAxis('thalamus_left', affine=np.eye(4), volume_shape=(2, 3, 4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # incorrect voxel shape axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 2), dtype=int), affine=np.eye(4), volume_shape=(2, 3, 4)) @@ -199,18 +199,18 @@ def test_brain_models(): assert np.array_equal(bm_vertex.name, ['CIFTI_STRUCTURE_CORTEX_LEFT'] * 5) assert np.array_equal(bm_vertex.vertex, np.full(5, 1)) assert np.array_equal(bm_vertex.voxel, np.full((5, 3), -1)) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int)) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_right': 20}) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.BrainModelAxis('cortex_left', vertex=-np.ones(5, dtype=int), nvertices={'cortex_left': 20}) # test from_mask errors - with assert_raises(ValueError): + with pytest.raises(ValueError): # affine should be 4x4 matrix axes.BrainModelAxis.from_mask(np.arange(5) > 2, affine=np.ones(5)) - with assert_raises(ValueError): + with pytest.raises(ValueError): # only 1D or 3D masks accepted axes.BrainModelAxis.from_mask(np.ones((5, 3))) @@ -226,27 +226,27 @@ def test_brain_models(): assert bm_added.volume_shape == bm_vox.volume_shape axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_vox)]) - with assert_raises(Exception): + with pytest.raises(Exception): bm_vox + get_label() bm_other_shape = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4), volume_shape=(4, 3, 4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): bm_vox + bm_other_shape - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_other_shape)]) bm_other_affine = axes.BrainModelAxis('thalamus_left', voxel=np.ones((5, 3), dtype=int), affine=np.eye(4) * 2, volume_shape=(2, 3, 4)) - with assert_raises(ValueError): + with pytest.raises(ValueError): bm_vox + bm_other_affine - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ParcelsAxis.from_brain_models([('a', bm_vox), ('b', bm_other_affine)]) bm_vertex = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 20}) bm_other_number = axes.BrainModelAxis('cortex_left', vertex=np.ones(5, dtype=int), nvertices={'cortex_left': 30}) - with assert_raises(ValueError): + with pytest.raises(ValueError): bm_vertex + bm_other_number - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ParcelsAxis.from_brain_models([('a', bm_vertex), ('b', bm_other_number)]) # test equalities @@ -336,29 +336,29 @@ def test_parcels(): assert len(prc2[3:]['mixed'][1]) == 1 assert prc2[3:]['mixed'][1]['CIFTI_STRUCTURE_CORTEX_LEFT'].shape == (3, ) - with assert_raises(IndexError): + with pytest.raises(IndexError): prc['non_existent'] prc['surface'] - with assert_raises(IndexError): + with pytest.raises(IndexError): # parcel exists twice prc2['surface'] # break parcels prc.affine = np.eye(4) - with assert_raises(ValueError): + with pytest.raises(ValueError): prc.affine = np.eye(3) - with 
assert_raises(ValueError): + with pytest.raises(ValueError): prc.affine = np.eye(4).flatten() prc.volume_shape = (5, 3, 1) - with assert_raises(ValueError): + with pytest.raises(ValueError): prc.volume_shape = (5., 3, 1) - with assert_raises(ValueError): + with pytest.raises(ValueError): prc.volume_shape = (5, 3, 1, 4) # break adding of parcels - with assert_raises(Exception): + with pytest.raises(Exception): prc + get_label() prc = get_parcels() @@ -367,12 +367,12 @@ def test_parcels(): other_prc = get_parcels() other_prc.affine = np.eye(4) * 2 - with assert_raises(ValueError): + with pytest.raises(ValueError): prc + other_prc other_prc = get_parcels() other_prc.volume_shape = (20, 3, 4) - with assert_raises(ValueError): + with pytest.raises(ValueError): prc + other_prc # test parcel equalities @@ -396,13 +396,13 @@ def test_parcels(): prc_other = deepcopy(prc) prc_other.volume_shape = (10, 3, 4) assert prc != prc_other - with assert_raises(ValueError): + with pytest.raises(ValueError): prc + prc_other prc_other = deepcopy(prc) prc_other.nvertices['CIFTI_STRUCTURE_CORTEX_LEFT'] = 80 assert prc != prc_other - with assert_raises(ValueError): + with pytest.raises(ValueError): prc + prc_other prc_other = deepcopy(prc) @@ -434,7 +434,7 @@ def test_parcels(): volume_shape=(2, 3, 4), ) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ParcelsAxis( voxels=[np.ones((3, 2), dtype=int)], vertices=[{}], @@ -466,7 +466,7 @@ def test_scalar(): # test equalities assert sc != get_label() - with assert_raises(Exception): + with pytest.raises(Exception): sc + get_label() sc_other = deepcopy(sc) @@ -485,10 +485,10 @@ def test_scalar(): # test constructor assert axes.ScalarAxis(['scalar_name'], [{}]) == axes.ScalarAxis(['scalar_name']) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ScalarAxis([['scalar_name']]) # wrong shape - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.ScalarAxis(['scalar_name'], [{}, {}]) # wrong size @@ -514,7 +514,7 @@ def test_label(): # test equalities lab = get_label() assert lab != get_scalar() - with assert_raises(Exception): + with pytest.raises(Exception): lab + get_scalar() other_lab = deepcopy(lab) @@ -540,10 +540,10 @@ def test_label(): # test constructor assert axes.LabelAxis(['scalar_name'], [{}], [{}]) == axes.LabelAxis(['scalar_name'], [{}]) - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.LabelAxis([['scalar_name']], [{}]) # wrong shape - with assert_raises(ValueError): + with pytest.raises(ValueError): axes.LabelAxis(['scalar_name'], [{}, {}]) # wrong size @@ -558,7 +558,7 @@ def test_series(): assert sr[3].unit == 'HERTZ' sr[0].unit = 'hertz' assert sr[0].unit == 'HERTZ' - with assert_raises(ValueError): + with pytest.raises(ValueError): sr[0].unit = 'non_existent' sr = list(get_series()) @@ -570,11 +570,17 @@ def test_series(): assert ((sr[1] + sr[0] + sr[0]).time == np.arange(11) * 10 + 8).all() assert sr[1][2] == 28 assert sr[1][-2] == sr[1].time[-2] - assert_raises(ValueError, lambda: sr[0] + sr[2]) - assert_raises(ValueError, lambda: sr[2] + sr[1]) - assert_raises(ValueError, lambda: sr[0] + sr[3]) - assert_raises(ValueError, lambda: sr[3] + sr[1]) - assert_raises(ValueError, lambda: sr[3] + sr[2]) + + with pytest.raises(ValueError): + sr[0] + sr[2] + with pytest.raises(ValueError): + sr[2] + sr[1] + with pytest.raises(ValueError): + sr[0] + sr[3] + with pytest.raises(ValueError): + sr[3] + sr[1] + with pytest.raises(ValueError): + sr[3] + sr[2] # test slicing assert 
(sr[0][1:3].time == sr[0].time[1:3]).all() @@ -590,16 +596,16 @@ def test_series(): assert (sr[0][3:1:-1].time == sr[0].time[3:1:-1]).all() assert (sr[0][1:3:-1].time == sr[0].time[1:3:-1]).all() - with assert_raises(IndexError): + with pytest.raises(IndexError): assert sr[0][[0, 1]] - with assert_raises(IndexError): + with pytest.raises(IndexError): assert sr[0][20] - with assert_raises(IndexError): + with pytest.raises(IndexError): assert sr[0][-20] # test_equalities sr = next(get_series()) - with assert_raises(Exception): + with pytest.raises(Exception): sr + get_scalar() assert sr != sr[:2] assert sr == sr[:] diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index 8f85b62041..78eac63d1b 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -9,7 +9,7 @@ from nibabel.nifti2 import Nifti2Header from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError -from nose.tools import assert_true, assert_equal, assert_raises, assert_is_none +import pytest from nibabel.tests.test_dataobj_images import TestDataobjAPI as _TDA @@ -27,244 +27,287 @@ def compare_xml_leaf(str1, str2): def test_value_if_klass(): - assert_equal(_value_if_klass(None, list), None) - assert_equal(_value_if_klass([1], list), [1]) - assert_raises(ValueError, _value_if_klass, 1, list) + assert _value_if_klass(None, list) is None + assert _value_if_klass([1], list) == [1] + with pytest.raises(ValueError): + _value_if_klass(1, list) def test_cifti2_metadata(): md = ci.Cifti2MetaData(metadata={'a': 'aval'}) - assert_equal(len(md), 1) - assert_equal(list(iter(md)), ['a']) - assert_equal(md['a'], 'aval') - assert_equal(md.data, dict([('a', 'aval')])) + assert len(md) == 1 + assert list(iter(md)) == ['a'] + assert md['a'] == 'aval' + assert md.data == dict([('a', 'aval')]) md = ci.Cifti2MetaData() - assert_equal(len(md), 0) - assert_equal(list(iter(md)), []) - assert_equal(md.data, {}) - assert_raises(ValueError, md.difference_update, None) + assert len(md) == 0 + assert list(iter(md)) == [] + assert md.data == {} + with pytest.raises(ValueError): + md.difference_update(None) md['a'] = 'aval' - assert_equal(md['a'], 'aval') - assert_equal(len(md), 1) - assert_equal(md.data, dict([('a', 'aval')])) + assert md['a'] == 'aval' + assert len(md) == 1 + assert md.data == dict([('a', 'aval')]) del md['a'] - assert_equal(len(md), 0) + assert len(md) == 0 metadata_test = [('a', 'aval'), ('b', 'bval')] md.update(metadata_test) - assert_equal(md.data, dict(metadata_test)) + assert md.data == dict(metadata_test) - assert_equal(list(iter(md)), list(iter(collections.OrderedDict(metadata_test)))) + assert list(iter(md)) == list(iter(collections.OrderedDict(metadata_test))) md.update({'a': 'aval', 'b': 'bval'}) - assert_equal(md.data, dict(metadata_test)) + assert md.data == dict(metadata_test) md.update({'a': 'aval', 'd': 'dval'}) - assert_equal(md.data, dict(metadata_test + [('d', 'dval')])) + assert md.data == dict(metadata_test + [('d', 'dval')]) md.difference_update({'a': 'aval', 'd': 'dval'}) - assert_equal(md.data, dict(metadata_test[1:])) + assert md.data == dict(metadata_test[1:]) - assert_raises(KeyError, md.difference_update, {'a': 'aval', 'd': 'dval'}) - assert_equal(md.to_xml().decode('utf-8'), - 'bbval') + with pytest.raises(KeyError): + md.difference_update({'a': 'aval', 'd': 'dval'}) + assert md.to_xml().decode('utf-8') == 'bbval' def test__float_01(): - assert_equal(_float_01(0), 0) - assert_equal(_float_01(1), 1) - 
assert_equal(_float_01('0'), 0) - assert_equal(_float_01('0.2'), 0.2) - assert_raises(ValueError, _float_01, 1.1) - assert_raises(ValueError, _float_01, -0.1) - assert_raises(ValueError, _float_01, 2) - assert_raises(ValueError, _float_01, -1) - assert_raises(ValueError, _float_01, 'foo') + assert _float_01(0) == 0 + assert _float_01(1) == 1 + assert _float_01('0') == 0 + assert _float_01('0.2') == 0.2 + with pytest.raises(ValueError): + _float_01(1.1) + with pytest.raises(ValueError): + _float_01(-0.1) + with pytest.raises(ValueError): + _float_01(2) + with pytest.raises(ValueError): + _float_01(-1) + with pytest.raises(ValueError): + _float_01('foo') def test_cifti2_labeltable(): lt = ci.Cifti2LabelTable() - assert_equal(len(lt), 0) - assert_raises(ci.Cifti2HeaderError, lt.to_xml) - assert_raises(ci.Cifti2HeaderError, lt._to_xml_element) + assert len(lt) == 0 + with pytest.raises(ci.Cifti2HeaderError): + lt.to_xml() + with pytest.raises(ci.Cifti2HeaderError): + lt._to_xml_element() + label = ci.Cifti2Label(label='Test', key=0) lt[0] = label - assert_equal(len(lt), 1) - assert_equal(dict(lt), {label.key: label}) + assert len(lt) == 1 + assert dict(lt) == {label.key: label} lt.clear() lt.append(label) - assert_equal(len(lt), 1) - assert_equal(dict(lt), {label.key: label}) + assert len(lt) == 1 + assert dict(lt) == {label.key: label} lt.clear() test_tuple = (label.label, label.red, label.green, label.blue, label.alpha) lt[label.key] = test_tuple - assert_equal(len(lt), 1) + assert len(lt) == 1 v = lt[label.key] - assert_equal( - (v.label, v.red, v.green, v.blue, v.alpha), - test_tuple - ) + assert (v.label, v.red, v.green, v.blue, v.alpha) == test_tuple + + with pytest.raises(ValueError): + lt[1] = label + + with pytest.raises(ValueError): + lt[0] = test_tuple[:-1] + + with pytest.raises(ValueError): + lt[0] = ('foo', 1.1, 0, 0, 1) + + with pytest.raises(ValueError): + lt[0] = ('foo', 1.0, -1, 0, 1) + + with pytest.raises(ValueError): + lt[0] = ('foo', 1.0, 0, -0.1, 1) - assert_raises(ValueError, lt.__setitem__, 1, label) - assert_raises(ValueError, lt.__setitem__, 0, test_tuple[:-1]) - assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.1, 0, 0, 1)) - assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.0, -1, 0, 1)) - assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.0, 0, -0.1, 1)) def test_cifti2_label(): lb = ci.Cifti2Label() lb.label = 'Test' lb.key = 0 - assert_equal(lb.rgba, (0, 0, 0, 0)) - assert_true(compare_xml_leaf( - lb.to_xml().decode('utf-8'), - "" - )) + assert lb.rgba == (0, 0, 0, 0) + assert compare_xml_leaf(lb.to_xml().decode('utf-8'), "") lb.red = 0 lb.green = 0.1 lb.blue = 0.2 lb.alpha = 0.3 - assert_equal(lb.rgba, (0, 0.1, 0.2, 0.3)) + assert lb.rgba == (0, 0.1, 0.2, 0.3) - assert_true(compare_xml_leaf( - lb.to_xml().decode('utf-8'), - "" - )) + assert compare_xml_leaf( + lb.to_xml().decode('utf-8'), + "") lb.red = 10 - assert_raises(ci.Cifti2HeaderError, lb.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + lb.to_xml() lb.red = 0 lb.key = 'a' - assert_raises(ci.Cifti2HeaderError, lb.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + lb.to_xml() lb.key = 0 def test_cifti2_parcel(): pl = ci.Cifti2Parcel() - assert_raises(ci.Cifti2HeaderError, pl.to_xml) - assert_raises(TypeError, pl.append_cifti_vertices, None) + with pytest.raises(ci.Cifti2HeaderError): + pl.to_xml() + + with pytest.raises(TypeError): + pl.append_cifti_vertices(None) + + with pytest.raises(ValueError): + ci.Cifti2Parcel(**{'vertices': [1, 2, 3]}) - assert_raises(ValueError, 
ci.Cifti2Parcel, **{'vertices': [1, 2, 3]}) pl = ci.Cifti2Parcel(name='region', voxel_indices_ijk=ci.Cifti2VoxelIndicesIJK([[1, 2, 3]]), vertices=[ci.Cifti2Vertices([0, 1, 2])]) pl.pop_cifti2_vertices(0) - assert_equal(len(pl.vertices), 0) - assert_equal( - pl.to_xml().decode('utf-8'), - '1 2 3' - ) + + assert len(pl.vertices) == 0 + assert pl.to_xml().decode('utf-8') == '1 2 3' def test_cifti2_vertices(): vs = ci.Cifti2Vertices() - assert_raises(ci.Cifti2HeaderError, vs.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + vs.to_xml() + vs.brain_structure = 'CIFTI_STRUCTURE_OTHER' - assert_equal( - vs.to_xml().decode('utf-8'), - '' - ) - assert_equal(len(vs), 0) + + assert vs.to_xml().decode('utf-8') == '' + + assert len(vs) == 0 vs.extend(np.array([0, 1, 2])) - assert_equal(len(vs), 3) - assert_raises(ValueError, vs.__setitem__, 1, 'a') - assert_raises(ValueError, vs.insert, 1, 'a') - assert_equal( - vs.to_xml().decode('utf-8'), - '0 1 2' - ) + assert len(vs) == 3 + with pytest.raises(ValueError): + vs[1] = 'a' + with pytest.raises(ValueError): + vs.insert(1, 'a') + + assert vs.to_xml().decode('utf-8') == '0 1 2' vs[0] = 10 - assert_equal(vs[0], 10) - assert_equal(len(vs), 3) + assert vs[0] == 10 + assert len(vs) == 3 vs = ci.Cifti2Vertices(vertices=[0, 1, 2]) - assert_equal(len(vs), 3) + assert len(vs) == 3 def test_cifti2_transformationmatrixvoxelindicesijktoxyz(): tr = ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ() - assert_raises(ci.Cifti2HeaderError, tr.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + tr.to_xml() def test_cifti2_surface(): s = ci.Cifti2Surface() - assert_raises(ci.Cifti2HeaderError, s.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + s.to_xml() def test_cifti2_volume(): vo = ci.Cifti2Volume() - assert_raises(ci.Cifti2HeaderError, vo.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + vo.to_xml() def test_cifti2_vertexindices(): vi = ci.Cifti2VertexIndices() - assert_equal(len(vi), 0) - assert_raises(ci.Cifti2HeaderError, vi.to_xml) + assert len(vi) == 0 + with pytest.raises(ci.Cifti2HeaderError): + vi.to_xml() vi.extend(np.array([0, 1, 2])) - assert_equal(len(vi), 3) - assert_equal( - vi.to_xml().decode('utf-8'), - '0 1 2' - ) - assert_raises(ValueError, vi.__setitem__, 0, 'a') + assert len(vi) == 3 + assert vi.to_xml().decode('utf-8') == '0 1 2' + + with pytest.raises(ValueError): + vi[0] = 'a' + vi[0] = 10 - assert_equal(vi[0], 10) - assert_equal(len(vi), 3) + assert vi[0] == 10 + assert len(vi) == 3 def test_cifti2_voxelindicesijk(): vi = ci.Cifti2VoxelIndicesIJK() - assert_raises(ci.Cifti2HeaderError, vi.to_xml) + with pytest.raises(ci.Cifti2HeaderError): + vi.to_xml() vi = ci.Cifti2VoxelIndicesIJK() - assert_equal(len(vi), 0) - assert_raises(ci.Cifti2HeaderError, vi.to_xml) + assert len(vi) == 0 + + with pytest.raises(ci.Cifti2HeaderError): + vi.to_xml() vi.extend(np.array([[0, 1, 2]])) - assert_equal(len(vi), 1) - assert_equal(vi[0], [0, 1, 2]) + + assert len(vi) == 1 + assert vi[0] == [0, 1, 2] vi.append([3, 4, 5]) - assert_equal(len(vi), 2) + assert len(vi) == 2 vi.append([6, 7, 8]) - assert_equal(len(vi), 3) + assert len(vi) == 3 del vi[-1] - assert_equal(len(vi), 2) + assert len(vi) == 2 - assert_equal(vi[1], [3, 4, 5]) + assert vi[1] == [3, 4, 5] vi[1] = [3, 4, 6] - assert_equal(vi[1], [3, 4, 6]) - assert_raises(ValueError, vi.__setitem__, 'a', [1, 2, 3]) - assert_raises(TypeError, vi.__setitem__, [1, 2], [1, 2, 3]) - assert_raises(ValueError, vi.__setitem__, 1, [2, 3]) - assert_equal(vi[1, 1], 4) - assert_raises(ValueError, 
vi.__setitem__, [1, 1], 'a') - assert_equal(vi[0, 1:], [1, 2]) + assert vi[1] == [3, 4, 6] + with pytest.raises(ValueError): + vi['a'] = [1, 2, 3] + + with pytest.raises(TypeError): + vi[[1, 2]] = [1, 2, 3] + + with pytest.raises(ValueError): + vi[1] = [2, 3] + + assert vi[1, 1] == 4 + + with pytest.raises(ValueError): + vi[[1, 1]] = 'a' + + assert vi[0, 1:] == [1, 2] vi[0, 1] = 10 - assert_equal(vi[0, 1], 10) + assert vi[0, 1] == 10 vi[0, 1] = 1 #test for vi[:, 0] and other slices - assert_raises(NotImplementedError, vi.__getitem__, (slice(None), 0)) - assert_raises(NotImplementedError, vi.__setitem__, (slice(None), 0), 0) - assert_raises(NotImplementedError, vi.__delitem__, (slice(None), 0)) - assert_raises(ValueError, vi.__getitem__, (0, 0, 0)) - assert_raises(ValueError, vi.__setitem__, (0, 0, 0), 0) - - assert_equal( - vi.to_xml().decode('utf-8'), - '0 1 2\n3 4 6' - ) - assert_raises(TypeError, ci.Cifti2VoxelIndicesIJK, [0, 1]) + with pytest.raises(NotImplementedError): + vi[(slice(None), 0)] + with pytest.raises(NotImplementedError): + vi[(slice(None), 0)] = 0 + with pytest.raises(NotImplementedError): + # Don't know how to use remove with slice + vi.__delitem__((slice(None), 0)) + with pytest.raises(ValueError): + vi[(0, 0, 0)] + + with pytest.raises(ValueError): + vi[(0, 0, 0)] = 0 + + assert vi.to_xml().decode('utf-8') == '0 1 2\n3 4 6' + + with pytest.raises(TypeError): + ci.Cifti2VoxelIndicesIJK([0, 1]) + vi = ci.Cifti2VoxelIndicesIJK([[1, 2, 3]]) - assert_equal(len(vi), 1) + assert len(vi) == 1 def test_matrixindicesmap(): @@ -273,59 +316,77 @@ def test_matrixindicesmap(): volume2 = ci.Cifti2Volume() parcel = ci.Cifti2Parcel() - assert_is_none(mim.volume) + assert mim.volume is None mim.append(volume) mim.append(parcel) - assert_equal(mim.volume, volume) - assert_raises(ci.Cifti2HeaderError, mim.insert, 0, volume) - assert_raises(ci.Cifti2HeaderError, mim.__setitem__, 1, volume) + assert mim.volume == volume + with pytest.raises(ci.Cifti2HeaderError): + mim.insert(0, volume) + + with pytest.raises(ci.Cifti2HeaderError): + mim[1] = volume mim[0] = volume2 - assert_equal(mim.volume, volume2) + assert mim.volume == volume2 del mim.volume - assert_is_none(mim.volume) - assert_raises(ValueError, delattr, mim, 'volume') + assert mim.volume is None + with pytest.raises(ValueError): + delattr(mim, 'volume') mim.volume = volume - assert_equal(mim.volume, volume) + assert mim.volume == volume mim.volume = volume2 - assert_equal(mim.volume, volume2) + assert mim.volume == volume2 - assert_raises(ValueError, setattr, mim, 'volume', parcel) + with pytest.raises(ValueError): + setattr(mim, 'volume', parcel) def test_matrix(): m = ci.Cifti2Matrix() - assert_raises(TypeError, m, setattr, 'metadata', ci.Cifti2Parcel()) - assert_raises(TypeError, m.__setitem__, 0, ci.Cifti2Parcel()) - assert_raises(TypeError, m.insert, 0, ci.Cifti2Parcel()) + + with pytest.raises(TypeError): + m(setattr, 'metadata', ci.Cifti2Parcel()) + + with pytest.raises(TypeError): + m[0] = ci.Cifti2Parcel() + + with pytest.raises(TypeError): + m.insert(0, ci.Cifti2Parcel()) mim_none = ci.Cifti2MatrixIndicesMap(None, 'CIFTI_INDEX_TYPE_LABELS') mim_0 = ci.Cifti2MatrixIndicesMap(0, 'CIFTI_INDEX_TYPE_LABELS') mim_1 = ci.Cifti2MatrixIndicesMap(1, 'CIFTI_INDEX_TYPE_LABELS') mim_01 = ci.Cifti2MatrixIndicesMap([0, 1], 'CIFTI_INDEX_TYPE_LABELS') - assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_none) - assert_equal(m.mapped_indices, []) + with pytest.raises(ci.Cifti2HeaderError): + m.insert(0, mim_none) + + assert 
m.mapped_indices == [] h = ci.Cifti2Header(matrix=m) - assert_equal(m.mapped_indices, []) + assert m.mapped_indices == [] m.insert(0, mim_0) - assert_equal(h.mapped_indices, [0]) - assert_equal(h.number_of_mapped_indices, 1) - assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_0) - assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_01) + assert h.mapped_indices == [0] + assert h.number_of_mapped_indices == 1 + with pytest.raises(ci.Cifti2HeaderError): + m.insert(0, mim_0) + + with pytest.raises(ci.Cifti2HeaderError): + m.insert(0, mim_01) + m[0] = mim_1 - assert_equal(list(m.mapped_indices), [1]) + assert list(m.mapped_indices) == [1] m.insert(0, mim_0) - assert_equal(list(sorted(m.mapped_indices)), [0, 1]) - assert_equal(h.number_of_mapped_indices, 2) - assert_equal(h.get_index_map(0), mim_0) - assert_equal(h.get_index_map(1), mim_1) - assert_raises(ci.Cifti2HeaderError, h.get_index_map, 2) + assert list(sorted(m.mapped_indices)) == [0, 1] + assert h.number_of_mapped_indices == 2 + assert h.get_index_map(0) == mim_0 + assert h.get_index_map(1) == mim_1 + with pytest.raises(ci.Cifti2HeaderError): + h.get_index_map(2) def test_underscoring(): @@ -342,7 +403,7 @@ def test_underscoring(): ) for camel, underscored in pairs: - assert_equal(ci.cifti2._underscore(camel), underscored) + assert ci.cifti2._underscore(camel) == underscored class TestCifti2ImageAPI(_TDA): diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index b8cbd05a32..9347051407 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -21,7 +21,7 @@ from nibabel.tests.test_nifti2 import TestNifti2SingleHeader from numpy.testing import assert_array_almost_equal -from nose.tools import (assert_true, assert_equal, assert_raises) +import pytest NIBABEL_TEST_DATA = pjoin(dirname(nib.__file__), 'tests', 'data') NIFTI2_DATA = pjoin(NIBABEL_TEST_DATA, 'example_nifti2.nii.gz') @@ -46,35 +46,36 @@ def test_read_nifti2(): filemap = ci.Cifti2Image.make_file_map() for k in filemap: filemap[k].fileobj = io.open(NIFTI2_DATA) - assert_raises(ValueError, ci.Cifti2Image.from_file_map, filemap) + with pytest.raises(ValueError): + ci.Cifti2Image.from_file_map(filemap) @needs_nibabel_data('nitest-cifti2') def test_read_internal(): img2 = ci.load(DATA_FILE6) - assert_true(isinstance(img2.header, ci.Cifti2Header)) - assert_equal(img2.shape, (1, 91282)) + assert isinstance(img2.header, ci.Cifti2Header) + assert img2.shape == (1, 91282) @needs_nibabel_data('nitest-cifti2') def test_read_and_proxies(): img2 = nib.load(DATA_FILE6) - assert_true(isinstance(img2.header, ci.Cifti2Header)) - assert_equal(img2.shape, (1, 91282)) + assert isinstance(img2.header, ci.Cifti2Header) + assert img2.shape == (1, 91282) # While we cannot reshape arrayproxies, all images are in-memory - assert_true(not img2.in_memory) + assert not img2.in_memory data = img2.get_fdata() - assert_true(data is not img2.dataobj) + assert data is not img2.dataobj # Uncaching has no effect, images are always array images img2.uncache() - assert_true(data is not img2.get_fdata()) + assert data is not img2.get_fdata() @needs_nibabel_data('nitest-cifti2') def test_version(): for i, dat in enumerate(datafiles): img = nib.load(dat) - assert_equal(LooseVersion(img.header.version), LooseVersion('2')) + assert LooseVersion(img.header.version), LooseVersion('2') @needs_nibabel_data('nitest-cifti2') @@ -84,8 +85,7 @@ def test_readwritedata(): img = ci.load(name) ci.save(img, 'test.nii') img2 
= ci.load('test.nii') - assert_equal(len(img.header.matrix), - len(img2.header.matrix)) + assert len(img.header.matrix) == len(img2.header.matrix) # Order should be preserved in load/save for mim1, mim2 in zip(img.header.matrix, img2.header.matrix): @@ -93,14 +93,14 @@ def test_readwritedata(): if isinstance(m_, ci.Cifti2NamedMap)] named_maps2 = [m_ for m_ in mim2 if isinstance(m_, ci.Cifti2NamedMap)] - assert_equal(len(named_maps1), len(named_maps2)) + assert len(named_maps1) == len(named_maps2) for map1, map2 in zip(named_maps1, named_maps2): - assert_equal(map1.map_name, map2.map_name) + assert map1.map_name == map2.map_name if map1.label_table is None: - assert_true(map2.label_table is None) + assert map2.label_table is None else: - assert_equal(len(map1.label_table), - len(map2.label_table)) + assert len(map1.label_table) == len(map2.label_table) + assert_array_almost_equal(img.dataobj, img2.dataobj) @@ -111,8 +111,7 @@ def test_nibabel_readwritedata(): img = nib.load(name) nib.save(img, 'test.nii') img2 = nib.load('test.nii') - assert_equal(len(img.header.matrix), - len(img2.header.matrix)) + assert len(img.header.matrix) == len(img2.header.matrix) # Order should be preserved in load/save for mim1, mim2 in zip(img.header.matrix, img2.header.matrix): @@ -120,14 +119,13 @@ def test_nibabel_readwritedata(): if isinstance(m_, ci.Cifti2NamedMap)] named_maps2 = [m_ for m_ in mim2 if isinstance(m_, ci.Cifti2NamedMap)] - assert_equal(len(named_maps1), len(named_maps2)) + assert len(named_maps1) == len(named_maps2) for map1, map2 in zip(named_maps1, named_maps2): - assert_equal(map1.map_name, map2.map_name) + assert map1.map_name == map2.map_name if map1.label_table is None: - assert_true(map2.label_table is None) + assert map2.label_table is None else: - assert_equal(len(map1.label_table), - len(map2.label_table)) + assert len(map1.label_table) == len(map2.label_table) assert_array_almost_equal(img.dataobj, img2.dataobj) @@ -152,10 +150,10 @@ def test_cifti2types(): for name in datafiles: hdr = ci.load(name).header # Matrix and MetaData aren't conditional, so don't bother counting - assert_true(isinstance(hdr.matrix, ci.Cifti2Matrix)) - assert_true(isinstance(hdr.matrix.metadata, ci.Cifti2MetaData)) + assert isinstance(hdr.matrix, ci.Cifti2Matrix) + assert isinstance(hdr.matrix.metadata, ci.Cifti2MetaData) for mim in hdr.matrix: - assert_true(isinstance(mim, ci.Cifti2MatrixIndicesMap)) + assert isinstance(mim, ci.Cifti2MatrixIndicesMap) counter[ci.Cifti2MatrixIndicesMap] += 1 for map_ in mim: print(map_) @@ -168,21 +166,20 @@ def test_cifti2types(): counter[ci.Cifti2VoxelIndicesIJK] += 1 elif isinstance(map_, ci.Cifti2NamedMap): counter[ci.Cifti2NamedMap] += 1 - assert_true(isinstance(map_.metadata, ci.Cifti2MetaData)) + assert isinstance(map_.metadata, ci.Cifti2MetaData) if isinstance(map_.label_table, ci.Cifti2LabelTable): counter[ci.Cifti2LabelTable] += 1 for label in map_.label_table: - assert_true(isinstance(map_.label_table[label], - ci.Cifti2Label)) + assert isinstance(map_.label_table[label], ci.Cifti2Label) counter[ci.Cifti2Label] += 1 elif isinstance(map_, ci.Cifti2Parcel): counter[ci.Cifti2Parcel] += 1 if isinstance(map_.voxel_indices_ijk, ci.Cifti2VoxelIndicesIJK): counter[ci.Cifti2VoxelIndicesIJK] += 1 - assert_true(isinstance(map_.vertices, list)) + assert isinstance(map_.vertices, list) for vtcs in map_.vertices: - assert_true(isinstance(vtcs, ci.Cifti2Vertices)) + assert isinstance(vtcs, ci.Cifti2Vertices) counter[ci.Cifti2Vertices] += 1 elif isinstance(map_, 
ci.Cifti2Surface): counter[ci.Cifti2Surface] += 1 @@ -192,19 +189,14 @@ def test_cifti2types(): ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ): counter[ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ] += 1 - assert_equal(list(mim.named_maps), - [m_ for m_ in mim if isinstance(m_, ci.Cifti2NamedMap)]) - assert_equal(list(mim.surfaces), - [m_ for m_ in mim if isinstance(m_, ci.Cifti2Surface)]) - assert_equal(list(mim.parcels), - [m_ for m_ in mim if isinstance(m_, ci.Cifti2Parcel)]) - assert_equal(list(mim.brain_models), - [m_ for m_ in mim if isinstance(m_, ci.Cifti2BrainModel)]) - assert_equal([mim.volume] if mim.volume else [], - [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)]) + assert list(mim.named_maps) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2NamedMap)] + assert list(mim.surfaces) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Surface)] + assert list(mim.parcels) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Parcel)] + assert list(mim.brain_models) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2BrainModel)] + assert [mim.volume] == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)] if mim.volume else [] for klass, count in counter.items(): - assert_true(count > 0, "No exercise of " + klass.__name__) + assert count > 0 # "No exercise of " + klass.__name__ @needs_nibabel_data('nitest-cifti2') @@ -237,34 +229,34 @@ def test_read_geometry(): ('CIFTI_STRUCTURE_THALAMUS_RIGHT', 1248, [32, 47, 34], [38, 55, 46])] current_index = 0 for from_file, expected in zip(geometry_mapping.brain_models, expected_geometry): - assert_true(from_file.model_type in ("CIFTI_MODEL_TYPE_SURFACE", "CIFTI_MODEL_TYPE_VOXELS")) - assert_equal(from_file.brain_structure, expected[0]) - assert_equal(from_file.index_offset, current_index) - assert_equal(from_file.index_count, expected[1]) + assert from_file.model_type in ("CIFTI_MODEL_TYPE_SURFACE", "CIFTI_MODEL_TYPE_VOXELS") + assert from_file.brain_structure == expected[0] + assert from_file.index_offset == current_index + assert from_file.index_count == expected[1] current_index += from_file.index_count if from_file.model_type == 'CIFTI_MODEL_TYPE_SURFACE': - assert_equal(from_file.voxel_indices_ijk, None) - assert_equal(len(from_file.vertex_indices), expected[1]) - assert_equal(from_file.vertex_indices[0], expected[2]) - assert_equal(from_file.vertex_indices[-1], expected[3]) - assert_equal(from_file.surface_number_of_vertices, 32492) + assert from_file.voxel_indices_ijk is None + assert len(from_file.vertex_indices) == expected[1] + assert from_file.vertex_indices[0] == expected[2] + assert from_file.vertex_indices[-1] == expected[3] + assert from_file.surface_number_of_vertices == 32492 else: - assert_equal(from_file.vertex_indices, None) - assert_equal(from_file.surface_number_of_vertices, None) - assert_equal(len(from_file.voxel_indices_ijk), expected[1]) - assert_equal(from_file.voxel_indices_ijk[0], expected[2]) - assert_equal(from_file.voxel_indices_ijk[-1], expected[3]) - assert_equal(current_index, img.shape[1]) + assert from_file.vertex_indices is None + assert from_file.surface_number_of_vertices is None + assert len(from_file.voxel_indices_ijk) == expected[1] + assert from_file.voxel_indices_ijk[0] == expected[2] + assert from_file.voxel_indices_ijk[-1], expected[3] + assert current_index == img.shape[1] expected_affine = [[-2, 0, 0, 90], [ 0, 2, 0, -126], [ 0, 0, 2, -72], [ 0, 0, 0, 1]] expected_dimensions = (91, 109, 91) - assert_true((geometry_mapping.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix == - 
expected_affine).all()) - assert_equal(geometry_mapping.volume.volume_dimensions, expected_dimensions) + assert (geometry_mapping.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix == + expected_affine).all() + assert geometry_mapping.volume.volume_dimensions == expected_dimensions @needs_nibabel_data('nitest-cifti2') @@ -327,18 +319,18 @@ def test_read_parcels(): ('ER_FRB08', ((103, 21514, 26470), (103, 21514, 26470))), ('13b_OFP03', ((60, 21042, 21194), (71, 21040, 21216)))] - assert_equal(img.shape[1], len(expected_parcels)) - assert_equal(len(list(parcel_mapping.parcels)), len(expected_parcels)) + assert img.shape[1] == len(expected_parcels) + assert len(list(parcel_mapping.parcels)) == len(expected_parcels) for (name, expected_surfaces), parcel in zip(expected_parcels, parcel_mapping.parcels): - assert_equal(parcel.name, name) - assert_equal(len(parcel.vertices), 2) + assert parcel.name == name + assert len(parcel.vertices) == 2 for vertices, orientation, (length, first_element, last_element) in zip(parcel.vertices, ('LEFT', 'RIGHT'), expected_surfaces): - assert_equal(len(vertices), length) - assert_equal(vertices[0], first_element) - assert_equal(vertices[-1], last_element) - assert_equal(vertices.brain_structure, 'CIFTI_STRUCTURE_CORTEX_%s' % orientation) + assert len(vertices) == length + assert vertices[0] == first_element + assert vertices[-1] == last_element + assert vertices.brain_structure == 'CIFTI_STRUCTURE_CORTEX_%s' % orientation @needs_nibabel_data('nitest-cifti2') @@ -347,31 +339,31 @@ def test_read_scalar(): scalar_mapping = img.header.matrix.get_index_map(0) expected_names = ('MyelinMap_BC_decurv', 'corrThickness') - assert_equal(img.shape[0], len(expected_names)) - assert_equal(len(list(scalar_mapping.named_maps)), len(expected_names)) + assert img.shape[0] == len(expected_names) + assert len(list(scalar_mapping.named_maps)) == len(expected_names) expected_meta = [('PaletteColorMapping', '\n Date: Mon, 10 Feb 2020 16:02:25 +0200 Subject: [PATCH 648/689] TEST: style fixed --- nibabel/cifti2/tests/test_cifti2.py | 6 +++--- nibabel/cifti2/tests/test_cifti2io_header.py | 6 ++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index 78eac63d1b..8b31839933 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -289,12 +289,12 @@ def test_cifti2_voxelindicesijk(): #test for vi[:, 0] and other slices with pytest.raises(NotImplementedError): - vi[(slice(None), 0)] + vi[:, 0] with pytest.raises(NotImplementedError): - vi[(slice(None), 0)] = 0 + vi[:, 0] = 0 with pytest.raises(NotImplementedError): # Don't know how to use remove with slice - vi.__delitem__((slice(None), 0)) + del vi[:, 0] with pytest.raises(ValueError): vi[(0, 0, 0)] diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 9347051407..4f9fab4ac4 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -436,10 +436,8 @@ def test_pixdim_log_checks(self): assert fhdr['pixdim'][1]== 2 assert message == self._pixdim_message + '; setting to abs of pixdim values' - - with pytest.raises(raiser[0]): - raiser[1](*raiser[2:]) - + pytest.raises(*raiser) + hdr = HC() hdr['pixdim'][1:4] = 0 # No error or warning fhdr, message, raiser = self.log_chk(hdr, 0) From 3b6896e6e26b4a3e5b381b17d65fcd33b6dde09d Mon Sep 17 00:00:00 2001 From: robbisg Date: Mon, 10 Feb 2020 22:26:38 +0200 
Subject: [PATCH 649/689] TEST: fixed comments --- nibabel/cifti2/tests/test_cifti2.py | 22 ++++++++++---------- nibabel/cifti2/tests/test_cifti2io_header.py | 12 +++++------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py index 8b31839933..0d3d550a66 100644 --- a/nibabel/cifti2/tests/test_cifti2.py +++ b/nibabel/cifti2/tests/test_cifti2.py @@ -140,7 +140,8 @@ def test_cifti2_label(): lb.label = 'Test' lb.key = 0 assert lb.rgba == (0, 0, 0, 0) - assert compare_xml_leaf(lb.to_xml().decode('utf-8'), "") + assert compare_xml_leaf(lb.to_xml().decode('utf-8'), + "") lb.red = 0 lb.green = 0.1 @@ -148,9 +149,8 @@ def test_cifti2_label(): lb.alpha = 0.3 assert lb.rgba == (0, 0.1, 0.2, 0.3) - assert compare_xml_leaf( - lb.to_xml().decode('utf-8'), - "") + assert compare_xml_leaf(lb.to_xml().decode('utf-8'), + "") lb.red = 10 with pytest.raises(ci.Cifti2HeaderError): @@ -172,7 +172,7 @@ def test_cifti2_parcel(): pl.append_cifti_vertices(None) with pytest.raises(ValueError): - ci.Cifti2Parcel(**{'vertices': [1, 2, 3]}) + ci.Cifti2Parcel(vertices=[1, 2, 3]) pl = ci.Cifti2Parcel(name='region', voxel_indices_ijk=ci.Cifti2VoxelIndicesIJK([[1, 2, 3]]), @@ -296,10 +296,10 @@ def test_cifti2_voxelindicesijk(): # Don't know how to use remove with slice del vi[:, 0] with pytest.raises(ValueError): - vi[(0, 0, 0)] + vi[0, 0, 0] with pytest.raises(ValueError): - vi[(0, 0, 0)] = 0 + vi[0, 0, 0] = 0 assert vi.to_xml().decode('utf-8') == '0 1 2\n3 4 6' @@ -334,7 +334,7 @@ def test_matrixindicesmap(): del mim.volume assert mim.volume is None with pytest.raises(ValueError): - delattr(mim, 'volume') + del mim.volume mim.volume = volume assert mim.volume == volume @@ -342,14 +342,14 @@ def test_matrixindicesmap(): assert mim.volume == volume2 with pytest.raises(ValueError): - setattr(mim, 'volume', parcel) + mim.volume = parcel def test_matrix(): m = ci.Cifti2Matrix() - with pytest.raises(TypeError): - m(setattr, 'metadata', ci.Cifti2Parcel()) + with pytest.raises(ValueError): + m.metadata = ci.Cifti2Parcel() with pytest.raises(TypeError): m[0] = ci.Cifti2Parcel() diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 4f9fab4ac4..0cbf167809 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -75,7 +75,7 @@ def test_read_and_proxies(): def test_version(): for i, dat in enumerate(datafiles): img = nib.load(dat) - assert LooseVersion(img.header.version), LooseVersion('2') + assert LooseVersion(img.header.version) == LooseVersion('2') @needs_nibabel_data('nitest-cifti2') @@ -193,10 +193,10 @@ def test_cifti2types(): assert list(mim.surfaces) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Surface)] assert list(mim.parcels) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Parcel)] assert list(mim.brain_models) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2BrainModel)] - assert [mim.volume] == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)] if mim.volume else [] + assert ([mim.volume] if mim.volume else []) == [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)] for klass, count in counter.items(): - assert count > 0 # "No exercise of " + klass.__name__ + assert count > 0, "No exercise of " + klass.__name__ @needs_nibabel_data('nitest-cifti2') @@ -246,7 +246,7 @@ def test_read_geometry(): assert from_file.surface_number_of_vertices is None assert len(from_file.voxel_indices_ijk) == expected[1] assert 
from_file.voxel_indices_ijk[0] == expected[2] - assert from_file.voxel_indices_ijk[-1], expected[3] + assert from_file.voxel_indices_ijk[-1] == expected[3] assert current_index == img.shape[1] expected_affine = [[-2, 0, 0, 90], @@ -352,7 +352,7 @@ def test_read_scalar(): assert key in scalar.metadata.data.keys() assert scalar.metadata[key][:len(value)] == value - assert scalar.label_table is None #".dscalar file should not define a label table" + assert scalar.label_table is None, ".dscalar file should not define a label table" @needs_nibabel_data('nitest-cifti2') @@ -433,7 +433,7 @@ def test_pixdim_log_checks(self): hdr = HC() hdr['pixdim'][1] = -2 # severity 35 fhdr, message, raiser = self.log_chk(hdr, 35) - assert fhdr['pixdim'][1]== 2 + assert fhdr['pixdim'][1] == 2 assert message == self._pixdim_message + '; setting to abs of pixdim values' pytest.raises(*raiser) From f8db80a539677dd1dea01ce7da9dbce955775131 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 10 Feb 2020 21:22:37 -0500 Subject: [PATCH 650/689] TEST/RF: Reduce redundancy in test_removalschedule, test failure case --- nibabel/tests/test_removalschedule.py | 102 ++++++++++++++++---------- 1 file changed, 65 insertions(+), 37 deletions(-) diff --git a/nibabel/tests/test_removalschedule.py b/nibabel/tests/test_removalschedule.py index 17f40d8395..28144f3af4 100644 --- a/nibabel/tests/test_removalschedule.py +++ b/nibabel/tests/test_removalschedule.py @@ -1,57 +1,85 @@ from ..pkg_info import cmp_pkg_version +import unittest +from unittest import mock import pytest MODULE_SCHEDULE = [ - ('5.0.0', ['nibabel.keywordonly']), - ('4.0.0', ['nibabel.trackvis']), - ('3.0.0', ['nibabel.minc', 'nibabel.checkwarns']), + ("5.0.0", ["nibabel.keywordonly"]), + ("4.0.0", ["nibabel.trackvis"]), + ("3.0.0", ["nibabel.minc", "nibabel.checkwarns"]), # Verify that the test will be quiet if the schedule outlives the modules - ('1.0.0', ['nibabel.neverexisted']), - ] + ("1.0.0", ["nibabel.nosuchmod"]), +] OBJECT_SCHEDULE = [ - ('3.0.0', [('nibabel.testing', 'catch_warn_reset')]), + ("5.0.0", [("nibabel.pydicom_compat", "dicom_test")]), + ("3.0.0", [("nibabel.testing", "catch_warn_reset")]), # Verify that the test will be quiet if the schedule outlives the modules - ('1.0.0', [('nibabel', 'neverexisted')]), - ] + ("1.0.0", [("nibabel.nosuchmod", "anyobj"), ("nibabel.nifti1", "nosuchobj")]), +] ATTRIBUTE_SCHEDULE = [ - ('5.0.0', [('nibabel.dataobj_images', 'DataobjImage', 'get_data')]), + ("5.0.0", [("nibabel.dataobj_images", "DataobjImage", "get_data")]), # Verify that the test will be quiet if the schedule outlives the modules - ('1.0.0', [('nibabel', 'Nifti1Image', 'neverexisted')]), - ] + ("1.0.0", [("nibabel.nosuchmod", "anyobj", "anyattr"), + ("nibabel.nifti1", "nosuchobj", "anyattr"), + ("nibabel.nifti1", "Nifti1Image", "nosuchattr")]), +] + + +def _filter(schedule): + return [entry for ver, entries in schedule if cmp_pkg_version(ver) < 1 for entry in entries] def test_module_removal(): - for version, to_remove in MODULE_SCHEDULE: - if cmp_pkg_version(version) < 1: - for module in to_remove: - with pytest.raises(ImportError): - __import__(module) - pytest.fail("Time to remove " + module) + for module in _filter(MODULE_SCHEDULE): + with pytest.raises(ImportError): + __import__(module) + assert False, "Time to remove %s" % module def test_object_removal(): - for version, to_remove in OBJECT_SCHEDULE: - if cmp_pkg_version(version) < 1: - for module_name, obj in to_remove: - try: - module = __import__(module_name) - except ImportError: - 
continue - assert not hasattr(module, obj), "Time to remove %s.%s" % (module_name, obj) + for module_name, obj in _filter(OBJECT_SCHEDULE): + try: + module = __import__(module_name) + except ImportError: + continue + assert not hasattr(module, obj), "Time to remove %s.%s" % (module_name, obj,) def test_attribute_removal(): - for version, to_remove in ATTRIBUTE_SCHEDULE: - if cmp_pkg_version(version) < 1: - for module_name, cls, attr in to_remove: - try: - module = __import__(module_name) - except ImportError: - continue - try: - klass = getattr(module, cls) - except AttributeError: - continue - assert not hasattr(klass, attr), "Time to remove %s.%s.%s" % (module_name, cls, attr) + for module_name, cls, attr in _filter(ATTRIBUTE_SCHEDULE): + try: + module = __import__(module_name) + except ImportError: + continue + try: + klass = getattr(module, cls) + except AttributeError: + continue + assert not hasattr(klass, attr), "Time to remove %s.%s.%s" % (module_name, cls, attr,) + + +# +# Test the tests, making sure that we will get errors when the time comes +# + +_sched = "nibabel.tests.test_removalschedule.{}_SCHEDULE".format + + +@mock.patch(_sched("MODULE"), [("3.0.0", ["nibabel.nifti1"])]) +def test_unremoved_module(): + with pytest.raises(AssertionError): + test_module_removal() + + +@mock.patch(_sched("OBJECT"), [("3.0.0", [("nibabel.nifti1", "Nifti1Image")])]) +def test_unremoved_object(): + with pytest.raises(AssertionError): + test_object_removal() + + +@mock.patch(_sched("ATTRIBUTE"), [("3.0.0", [("nibabel.nifti1", "Nifti1Image", "affine")])]) +def test_unremoved_attr(): + with pytest.raises(AssertionError): + test_attribute_removal() From 9183ffb1a366039ed8e46d4b5f64350704f582f5 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 11 Feb 2020 07:17:39 -0500 Subject: [PATCH 651/689] MNT/FIX: Bump minimum matplotlib to 1.5.2 to match available wheels --- .travis.yml | 5 ++--- nibabel/tests/test_viewers.py | 3 ++- setup.cfg | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 81d3589769..6bcee464ae 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,10 +15,9 @@ env: - DEPENDS="numpy scipy matplotlib h5py pillow pydicom indexed_gzip" - INSTALL_TYPE="setup" - CHECK_TYPE="test" - - OLD_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" - EXTRA_WHEELS="https://3f23b170c54c2533c070-1c8a9b3114517dc5fe17b7c3f8c63a43.ssl.cf2.rackcdn.com" - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS --find-links=$OLD_WHEELS" + - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS" - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" python: @@ -45,7 +44,7 @@ matrix: # Absolute minimum dependencies plus oldest MPL - python: 3.5 env: - - DEPENDS="-r min-requirements.txt matplotlib==1.3.1" + - DEPENDS="-r min-requirements.txt matplotlib==1.5.2" # Minimum pydicom dependency - python: 3.5 env: diff --git a/nibabel/tests/test_viewers.py b/nibabel/tests/test_viewers.py index 68710b3126..d4879b46a8 100644 --- a/nibabel/tests/test_viewers.py +++ b/nibabel/tests/test_viewers.py @@ -20,7 +20,8 @@ from nose.tools import assert_raises, assert_true # Need at least MPL 1.3 for viewer tests. 
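# For context: optional_package returns a (module-or-stub, have-flag,
# setup-function) triple, and the flag, combined with the min_version
# floor, feeds a skip decorator so these tests only run against a
# new-enough matplotlib.  A minimal usage sketch (the decorated test is
# an illustration, not code from this file):
#
#     matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.5')
#     needs_mpl = skipif(not has_mpl, 'These tests need matplotlib')
#
#     @needs_mpl
#     def test_viewer_smoke():
#         ...  # runs only when a suitable matplotlib is installed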
-matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.3')
+# 2020.02.11 - 1.3 wheels are no longer distributed, so the minimum we test with is 1.5
+matplotlib, has_mpl, _ = optional_package('matplotlib', min_version='1.5')
 needs_mpl = skipif(not has_mpl, 'These tests need matplotlib')
 if has_mpl:
diff --git a/setup.cfg b/setup.cfg
index 5543e13d5b..8450d9d4f9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -48,7 +48,7 @@ dev =
     gitpython
     twine
 doc =
-    matplotlib >= 1.3.1
+    matplotlib >= 1.5.2
     numpydoc
     sphinx >=0.3
     texext

From 50265dcf50005577ce5a522af04410aba7d884ae Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 11 Feb 2020 07:34:37 -0500
Subject: [PATCH 652/689] MNT: Use matplotlib 1.5.3 to smooth transition to
 Python 3.6

---
 .travis.yml | 2 +-
 setup.cfg   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 6bcee464ae..622a19db32 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -44,7 +44,7 @@ matrix:
     # Absolute minimum dependencies plus oldest MPL
     - python: 3.5
       env:
-        - DEPENDS="-r min-requirements.txt matplotlib==1.5.2"
+        - DEPENDS="-r min-requirements.txt matplotlib==1.5.3"
     # Minimum pydicom dependency
     - python: 3.5
       env:
diff --git a/setup.cfg b/setup.cfg
index 8450d9d4f9..c3bcf3714d 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -48,7 +48,7 @@ dev =
     gitpython
     twine
 doc =
-    matplotlib >= 1.5.2
+    matplotlib >= 1.5.3
     numpydoc
     sphinx >=0.3
     texext

From bb9cb15e4fd894598fc454c18f8f910f230d338e Mon Sep 17 00:00:00 2001
From: Yaroslav Halchenko
Date: Fri, 14 Feb 2020 13:35:29 -0500
Subject: [PATCH 653/689] RF: minc - delay import of h5py until needed.

On Debian systems h5py comes with an MPI-enabled build. I guess on some
misconfigured systems, like my laptop, it might take a while to import.
And since h5py is imported here upon import of `nibabel`, it makes
importing `nibabel` that much slower.

Here is timing on my laptop before the change:

    $> time python -c 'import nibabel'
    python -c 'import nibabel'  0.20s user 0.08s system 5% cpu 5.356 total

and here is after the change:

    $> time python -c 'import nibabel'
    python -c 'import nibabel'  0.13s user 0.02s system 100% cpu 0.150 total
---
 nibabel/minc2.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/nibabel/minc2.py b/nibabel/minc2.py
index a73114081c..90b039d8da 100644
--- a/nibabel/minc2.py
+++ b/nibabel/minc2.py
@@ -27,8 +27,6 @@
 """
 import numpy as np

-from ._h5py_compat import h5py
-
 from .minc1 import Minc1File, MincHeader, Minc1Image, MincError

@@ -158,6 +156,9 @@ class Minc2Image(Minc1Image):
     @classmethod
     def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
+        # Import of h5py might take awhile for MPI-enabled builds
+        # So we are importing it here "on demand"
+        from ._h5py_compat import h5py
         holder = file_map['image']
         if holder.filename is None:
             raise MincError('MINC2 needs filename for load')

From 69674612400f3450d1a7358e0bb1b02d4550dfb6 Mon Sep 17 00:00:00 2001
From: Yaroslav Halchenko
Date: Fri, 14 Feb 2020 13:35:29 -0500
Subject: [PATCH 654/689] RF: minc - delay import of h5py until needed.

On Debian systems h5py comes with an MPI-enabled build. I guess on some
misconfigured systems, like my laptop, it might take a while to import.
And since h5py is imported here upon import of `nibabel`, it makes
importing `nibabel` that much slower.
Here is timing on my laptop before the change: $> time python -c 'import nibabel' python -c 'import nibabel' 0.20s user 0.08s system 5% cpu 5.356 total and here is after the change: $> time python -c 'import nibabel' python -c 'import nibabel' 0.13s user 0.02s system 100% cpu 0.150 total --- nibabel/minc2.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index b27d43f77f..7f03fb66a7 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -28,7 +28,6 @@ import numpy as np from .keywordonly import kw_only_meth -from ._h5py_compat import h5py from .minc1 import Minc1File, MincHeader, Minc1Image, MincError @@ -160,6 +159,9 @@ class Minc2Image(Minc1Image): @classmethod @kw_only_meth(1) def from_file_map(klass, file_map, mmap=True, keep_file_open=None): + # Import of h5py might take awhile for MPI-enabled builds + # So we are importing it here "on demand" + from ._h5py_compat import h5py holder = file_map['image'] if holder.filename is None: raise MincError('MINC2 needs filename for load') From 8db1cd9a6333c76d872e807d81827a21242ba76c Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Sun, 16 Feb 2020 19:39:14 -0500 Subject: [PATCH 655/689] changing testing_pytest back to testing --- nibabel/cifti2/tests/test_new_cifti2.py | 2 +- nibabel/freesurfer/tests/test_io.py | 2 +- nibabel/freesurfer/tests/test_mghformat.py | 2 +- nibabel/gifti/tests/test_gifti.py | 2 +- nibabel/gifti/tests/test_parse_gifti_fast.py | 2 +- .../streamlines/tests/test_array_sequence.py | 2 +- nibabel/streamlines/tests/test_tck.py | 2 +- nibabel/streamlines/tests/test_tractogram.py | 2 +- nibabel/streamlines/tests/test_trk.py | 2 +- nibabel/testing/__init__.py | 52 ++-- .../{testing_pytest => testing}/helpers.py | 0 nibabel/testing_pytest/__init__.py | 223 ------------------ nibabel/testing_pytest/np_features.py | 23 -- nibabel/tests/test_analyze.py | 4 +- nibabel/tests/test_arrayproxy.py | 2 +- nibabel/tests/test_arraywriters.py | 4 +- nibabel/tests/test_brikhead.py | 2 +- nibabel/tests/test_casting.py | 2 +- nibabel/tests/test_deprecator.py | 2 +- nibabel/tests/test_dft.py | 2 +- nibabel/tests/test_ecat.py | 2 +- nibabel/tests/test_floating.py | 2 +- nibabel/tests/test_image_api.py | 4 +- nibabel/tests/test_imageclasses.py | 2 +- nibabel/tests/test_minc1.py | 2 +- nibabel/tests/test_minc2.py | 2 +- nibabel/tests/test_nifti1.py | 5 +- nibabel/tests/test_nifti2.py | 2 +- nibabel/tests/test_openers.py | 2 +- nibabel/tests/test_parrec.py | 2 +- nibabel/tests/test_proxy_api.py | 2 +- nibabel/tests/test_scaling.py | 2 +- nibabel/tests/test_scripts.py | 2 +- nibabel/tests/test_spatialimages.py | 4 +- nibabel/tests/test_spm99analyze.py | 2 +- nibabel/tests/test_testing.py | 2 +- nibabel/tests/test_volumeutils.py | 2 +- nibabel/tests/test_wrapstruct.py | 2 +- 38 files changed, 63 insertions(+), 314 deletions(-) rename nibabel/{testing_pytest => testing}/helpers.py (100%) delete mode 100644 nibabel/testing_pytest/__init__.py delete mode 100644 nibabel/testing_pytest/np_features.py diff --git a/nibabel/cifti2/tests/test_new_cifti2.py b/nibabel/cifti2/tests/test_new_cifti2.py index a0ba390b8e..944a1c1576 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -13,7 +13,7 @@ from nibabel.tmpdirs import InTemporaryDirectory import pytest -from ...testing_pytest import ( +from ...testing import ( clear_and_catch_warnings, error_warnings, suppress_warnings, assert_array_equal) affine = [[-1.5, 0, 0, 90], diff --git 
a/nibabel/freesurfer/tests/test_io.py b/nibabel/freesurfer/tests/test_io.py index e1ec971c65..b2401a11ab 100644 --- a/nibabel/freesurfer/tests/test_io.py +++ b/nibabel/freesurfer/tests/test_io.py @@ -19,7 +19,7 @@ from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data from ...fileslice import strided_scalar -from ...testing_pytest import clear_and_catch_warnings +from ...testing import clear_and_catch_warnings DATA_SDIR = 'fsaverage' diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index e3090ca5fa..e1cfc56b18 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -28,7 +28,7 @@ from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_almost_equal -from ...testing_pytest import data_path +from ...testing import data_path from ...tests import test_spatialimages as tsi from ...tests.test_wrapstruct import _TestLabeledWrapStruct diff --git a/nibabel/gifti/tests/test_gifti.py b/nibabel/gifti/tests/test_gifti.py index 5cb70019d8..2d60482c59 100644 --- a/nibabel/gifti/tests/test_gifti.py +++ b/nibabel/gifti/tests/test_gifti.py @@ -16,7 +16,7 @@ from numpy.testing import assert_array_almost_equal, assert_array_equal import pytest -from ...testing_pytest import clear_and_catch_warnings, test_data +from ...testing import clear_and_catch_warnings, test_data from .test_parse_gifti_fast import (DATA_FILE1, DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6) import itertools diff --git a/nibabel/gifti/tests/test_parse_gifti_fast.py b/nibabel/gifti/tests/test_parse_gifti_fast.py index 5e8150ac64..54d8e78621 100644 --- a/nibabel/gifti/tests/test_parse_gifti_fast.py +++ b/nibabel/gifti/tests/test_parse_gifti_fast.py @@ -23,7 +23,7 @@ from numpy.testing import assert_array_almost_equal import pytest -from ...testing_pytest import clear_and_catch_warnings +from ...testing import clear_and_catch_warnings IO_DATA_PATH = pjoin(dirname(__file__), 'data') diff --git a/nibabel/streamlines/tests/test_array_sequence.py b/nibabel/streamlines/tests/test_array_sequence.py index 0802d19b19..06e19248f4 100644 --- a/nibabel/streamlines/tests/test_array_sequence.py +++ b/nibabel/streamlines/tests/test_array_sequence.py @@ -6,7 +6,7 @@ import numpy as np import pytest -from ...testing_pytest import assert_arrays_equal +from ...testing import assert_arrays_equal from numpy.testing import assert_array_equal from ..array_sequence import ArraySequence, is_array_sequence, concatenate diff --git a/nibabel/streamlines/tests/test_tck.py b/nibabel/streamlines/tests/test_tck.py index fb29776f75..0dfb043d83 100644 --- a/nibabel/streamlines/tests/test_tck.py +++ b/nibabel/streamlines/tests/test_tck.py @@ -16,7 +16,7 @@ import pytest from numpy.testing import assert_array_equal -from ...testing_pytest import data_path, clear_and_catch_warnings +from ...testing import data_path, clear_and_catch_warnings from .test_tractogram import assert_tractogram_equal DATA = {} diff --git a/nibabel/streamlines/tests/test_tractogram.py b/nibabel/streamlines/tests/test_tractogram.py index 16f1df2989..f86594d070 100644 --- a/nibabel/streamlines/tests/test_tractogram.py +++ b/nibabel/streamlines/tests/test_tractogram.py @@ -7,7 +7,7 @@ from collections import defaultdict import pytest -from ...testing_pytest import assert_arrays_equal, clear_and_catch_warnings +from ...testing import assert_arrays_equal, clear_and_catch_warnings from numpy.testing import assert_array_equal, assert_array_almost_equal from 
.. import tractogram as module_tractogram diff --git a/nibabel/streamlines/tests/test_trk.py b/nibabel/streamlines/tests/test_trk.py index 736f61b820..8fb35fc368 100644 --- a/nibabel/streamlines/tests/test_trk.py +++ b/nibabel/streamlines/tests/test_trk.py @@ -8,7 +8,7 @@ from io import BytesIO import pytest -from ...testing_pytest import data_path, clear_and_catch_warnings, assert_arr_dict_equal +from ...testing import data_path, clear_and_catch_warnings, assert_arr_dict_equal from numpy.testing import assert_array_equal from .test_tractogram import assert_tractogram_equal diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index fbd1128589..52055ebcc3 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -13,22 +13,14 @@ import sys import warnings from pkg_resources import resource_filename -from os.path import dirname, abspath, join as pjoin -import numpy as np -from numpy.testing import assert_array_equal, assert_warns -from numpy.testing import dec -skipif = dec.skipif -slow = dec.slow +import unittest -from ..deprecated import deprecate_with_version as _deprecate_with_version +import numpy as np +from numpy.testing import assert_array_equal -# Allow failed import of nose if not now running tests -try: - from nose.tools import (assert_equal, assert_not_equal, - assert_true, assert_false, assert_raises) -except ImportError: - pass +from .np_features import memmap_after_ufunc +from .helpers import bytesio_filemap, bytesio_round_trip, assert_data_similar from itertools import zip_longest @@ -51,14 +43,12 @@ def test_data(subdir=None, fname=None): data_path = test_data() -from .np_features import memmap_after_ufunc - def assert_dt_equal(a, b): """ Assert two numpy dtype specifiers are equal Avoids failed comparison between int32 / int64 and intp """ - assert_equal(np.dtype(a).str, np.dtype(b).str) + assert np.dtype(a).str == np.dtype(b).str def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): @@ -68,7 +58,7 @@ def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): a, b = np.broadcast_arrays(a, b) if match_nans: nans = np.isnan(a) - np.testing.assert_array_equal(nans, np.isnan(b)) + assert_array_equal(nans, np.isnan(b)) to_test = ~nans else: to_test = np.ones(a.shape, dtype=bool) @@ -81,13 +71,13 @@ def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): a = a.astype(float) if b.dtype.kind in 'ui': b = b.astype(float) - assert_true(np.allclose(a, b, rtol=rtol, atol=atol)) + assert np.allclose(a, b, rtol=rtol, atol=atol) def assert_arrays_equal(arrays1, arrays2): """ Check two iterables yield the same sequence of arrays. 
""" for arr1, arr2 in zip_longest(arrays1, arrays2, fillvalue=None): - assert_false(arr1 is None or arr2 is None) + assert (arr1 is not None and arr2 is not None) assert_array_equal(arr1, arr2) @@ -204,26 +194,30 @@ class suppress_warnings(error_warnings): filter = 'ignore' -@_deprecate_with_version('catch_warn_reset is deprecated; use ' - 'nibabel.testing.clear_and_catch_warnings.', - since='2.1.0', until='3.0.0') -class catch_warn_reset(clear_and_catch_warnings): - pass - - EXTRA_SET = os.environ.get('NIPY_EXTRA_TESTS', '').split(',') def runif_extra_has(test_str): """Decorator checks to see if NIPY_EXTRA_TESTS env var contains test_str""" - return skipif(test_str not in EXTRA_SET, - "Skip {0} tests.".format(test_str)) + return unittest.skipUnless(test_str in EXTRA_SET, "Skip {0} tests.".format(test_str)) def assert_arr_dict_equal(dict1, dict2): """ Assert that two dicts are equal, where dicts contain arrays """ - assert_equal(set(dict1), set(dict2)) + assert set(dict1) == set(dict2) for key, value1 in dict1.items(): value2 = dict2[key] assert_array_equal(value1, value2) + + +class BaseTestCase(unittest.TestCase): + """ TestCase that does not attempt to run if prefixed with a ``_`` + + This restores the nose-like behavior of skipping so-named test cases + in test runners like pytest. + """ + def setUp(self): + if self.__class__.__name__.startswith('_'): + raise unittest.SkipTest("Base test case - subclass to run") + super().setUp() diff --git a/nibabel/testing_pytest/helpers.py b/nibabel/testing/helpers.py similarity index 100% rename from nibabel/testing_pytest/helpers.py rename to nibabel/testing/helpers.py diff --git a/nibabel/testing_pytest/__init__.py b/nibabel/testing_pytest/__init__.py deleted file mode 100644 index 52055ebcc3..0000000000 --- a/nibabel/testing_pytest/__init__.py +++ /dev/null @@ -1,223 +0,0 @@ -# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# -# See COPYING file distributed along with the NiBabel package for the -# copyright and license terms. 
-# -### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Utilities for testing ''' - -import re -import os -import sys -import warnings -from pkg_resources import resource_filename - -import unittest - -import numpy as np -from numpy.testing import assert_array_equal - -from .np_features import memmap_after_ufunc -from .helpers import bytesio_filemap, bytesio_round_trip, assert_data_similar - -from itertools import zip_longest - - -def test_data(subdir=None, fname=None): - if subdir is None: - resource = os.path.join('tests', 'data') - elif subdir in ('gifti', 'nicom', 'externals'): - resource = os.path.join(subdir, 'tests', 'data') - else: - raise ValueError("Unknown test data directory: %s" % subdir) - - if fname is not None: - resource = os.path.join(resource, fname) - - return resource_filename('nibabel', resource) - - -# set path to example data -data_path = test_data() - - -def assert_dt_equal(a, b): - """ Assert two numpy dtype specifiers are equal - - Avoids failed comparison between int32 / int64 and intp - """ - assert np.dtype(a).str == np.dtype(b).str - - -def assert_allclose_safely(a, b, match_nans=True, rtol=1e-5, atol=1e-8): - """ Allclose in integers go all wrong for large integers - """ - a = np.atleast_1d(a) # 0d arrays cannot be indexed - a, b = np.broadcast_arrays(a, b) - if match_nans: - nans = np.isnan(a) - assert_array_equal(nans, np.isnan(b)) - to_test = ~nans - else: - to_test = np.ones(a.shape, dtype=bool) - # Deal with float128 inf comparisons (bug in numpy 1.9.2) - # np.allclose(np.float128(np.inf), np.float128(np.inf)) == False - to_test = to_test & (a != b) - a = a[to_test] - b = b[to_test] - if a.dtype.kind in 'ui': - a = a.astype(float) - if b.dtype.kind in 'ui': - b = b.astype(float) - assert np.allclose(a, b, rtol=rtol, atol=atol) - - -def assert_arrays_equal(arrays1, arrays2): - """ Check two iterables yield the same sequence of arrays. """ - for arr1, arr2 in zip_longest(arrays1, arrays2, fillvalue=None): - assert (arr1 is not None and arr2 is not None) - assert_array_equal(arr1, arr2) - - -def assert_re_in(regex, c, flags=0): - """Assert that container (list, str, etc) contains entry matching the regex - """ - if not isinstance(c, (list, tuple)): - c = [c] - for e in c: - if re.match(regex, e, flags=flags): - return - raise AssertionError("Not a single entry matched %r in %r" % (regex, c)) - - -def get_fresh_mod(mod_name=__name__): - # Get this module, with warning registry empty - my_mod = sys.modules[mod_name] - try: - my_mod.__warningregistry__.clear() - except AttributeError: - pass - return my_mod - - -class clear_and_catch_warnings(warnings.catch_warnings): - """ Context manager that resets warning registry for catching warnings - - Warnings can be slippery, because, whenever a warning is triggered, Python - adds a ``__warningregistry__`` member to the *calling* module. This makes - it impossible to retrigger the warning in this module, whatever you put in - the warnings filters. This context manager accepts a sequence of `modules` - as a keyword argument to its constructor and: - - * stores and removes any ``__warningregistry__`` entries in given `modules` - on entry; - * resets ``__warningregistry__`` to its previous state on exit. - - This makes it possible to trigger any warning afresh inside the context - manager without disturbing the state of warnings outside. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. 
- - Parameters - ---------- - record : bool, optional - Specifies whether warnings should be captured by a custom - implementation of ``warnings.showwarning()`` and be appended to a list - returned by the context manager. Otherwise None is returned by the - context manager. The objects appended to the list are arguments whose - attributes mirror the arguments to ``showwarning()``. - - NOTE: nibabel difference from numpy: default is True - - modules : sequence, optional - Sequence of modules for which to reset warnings registry on entry and - restore on exit - - Examples - -------- - >>> import warnings - >>> with clear_and_catch_warnings(modules=[np.core.fromnumeric]): - ... warnings.simplefilter('always') - ... # do something that raises a warning in np.core.fromnumeric - """ - class_modules = () - - def __init__(self, record=True, modules=()): - self.modules = set(modules).union(self.class_modules) - self._warnreg_copies = {} - super(clear_and_catch_warnings, self).__init__(record=record) - - def __enter__(self): - for mod in self.modules: - if hasattr(mod, '__warningregistry__'): - mod_reg = mod.__warningregistry__ - self._warnreg_copies[mod] = mod_reg.copy() - mod_reg.clear() - return super(clear_and_catch_warnings, self).__enter__() - - def __exit__(self, *exc_info): - super(clear_and_catch_warnings, self).__exit__(*exc_info) - for mod in self.modules: - if hasattr(mod, '__warningregistry__'): - mod.__warningregistry__.clear() - if mod in self._warnreg_copies: - mod.__warningregistry__.update(self._warnreg_copies[mod]) - - -class error_warnings(clear_and_catch_warnings): - """ Context manager to check for warnings as errors. Usually used with - ``assert_raises`` in the with block - - Examples - -------- - >>> with error_warnings(): - ... try: - ... warnings.warn('Message', UserWarning) - ... except UserWarning: - ... print('I consider myself warned') - I consider myself warned - """ - filter = 'error' - - def __enter__(self): - mgr = super(error_warnings, self).__enter__() - warnings.simplefilter(self.filter) - return mgr - - -class suppress_warnings(error_warnings): - """ Version of ``catch_warnings`` class that suppresses warnings - """ - filter = 'ignore' - - -EXTRA_SET = os.environ.get('NIPY_EXTRA_TESTS', '').split(',') - - -def runif_extra_has(test_str): - """Decorator checks to see if NIPY_EXTRA_TESTS env var contains test_str""" - return unittest.skipUnless(test_str in EXTRA_SET, "Skip {0} tests.".format(test_str)) - - -def assert_arr_dict_equal(dict1, dict2): - """ Assert that two dicts are equal, where dicts contain arrays - """ - assert set(dict1) == set(dict2) - for key, value1 in dict1.items(): - value2 = dict2[key] - assert_array_equal(value1, value2) - - -class BaseTestCase(unittest.TestCase): - """ TestCase that does not attempt to run if prefixed with a ``_`` - - This restores the nose-like behavior of skipping so-named test cases - in test runners like pytest. - """ - def setUp(self): - if self.__class__.__name__.startswith('_'): - raise unittest.SkipTest("Base test case - subclass to run") - super().setUp() diff --git a/nibabel/testing_pytest/np_features.py b/nibabel/testing_pytest/np_features.py deleted file mode 100644 index 8919542d1c..0000000000 --- a/nibabel/testing_pytest/np_features.py +++ /dev/null @@ -1,23 +0,0 @@ -""" Look for changes in numpy behavior over versions -""" - -import numpy as np - - -def memmap_after_ufunc(): - """ Return True if ufuncs on memmap arrays always return memmap arrays - - This should be True for numpy < 1.12, False otherwise. 
- - Memoize after first call. We do this to avoid having to call this when - importing nibabel.testing, because we cannot depend on the source file - being present - see gh-571. - """ - if memmap_after_ufunc.result is not None: - return memmap_after_ufunc.result - with open(__file__, 'rb') as fobj: - mm_arr = np.memmap(fobj, mode='r', shape=(10,), dtype=np.uint8) - memmap_after_ufunc.result = isinstance(mm_arr + 1, np.memmap) - return memmap_after_ufunc.result - -memmap_after_ufunc.result = None diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 2b2e78dd23..b092a2334c 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -34,11 +34,11 @@ import pytest from numpy.testing import (assert_array_equal, assert_array_almost_equal) -from ..testing_pytest import (data_path, suppress_warnings, assert_dt_equal) +from ..testing import (data_path, suppress_warnings, assert_dt_equal, + bytesio_filemap, bytesio_round_trip) from .test_wrapstruct import _TestLabeledWrapStruct from . import test_spatialimages as tsi -from ..testing_pytest import bytesio_filemap, bytesio_round_trip header_file = os.path.join(data_path, 'analyze.hdr') diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py index 7b2fcca384..2a509acb88 100644 --- a/nibabel/tests/test_arrayproxy.py +++ b/nibabel/tests/test_arrayproxy.py @@ -27,7 +27,7 @@ from numpy.testing import assert_array_equal, assert_array_almost_equal import pytest -from ..testing_pytest import memmap_after_ufunc +from ..testing import memmap_after_ufunc from .test_fileslice import slicer_samples from .test_openers import patch_indexed_gzip diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py index 1a1c4eb156..380126d4d4 100644 --- a/nibabel/tests/test_arraywriters.py +++ b/nibabel/tests/test_arraywriters.py @@ -16,8 +16,8 @@ from numpy.testing import assert_array_almost_equal, assert_array_equal import pytest -from ..testing_pytest import (assert_allclose_safely, suppress_warnings, - error_warnings) +from ..testing import (assert_allclose_safely, suppress_warnings, + error_warnings) FLOAT_TYPES = np.sctypes['float'] diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index 60bd008a46..45e149b93b 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -16,7 +16,7 @@ import pytest from numpy.testing import assert_array_equal -from ..testing_pytest import data_path, assert_data_similar +from ..testing import data_path, assert_data_similar from .test_fileslice import slicer_samples diff --git a/nibabel/tests/test_casting.py b/nibabel/tests/test_casting.py index c4f9b9ba9e..b8f56454b5 100644 --- a/nibabel/tests/test_casting.py +++ b/nibabel/tests/test_casting.py @@ -8,7 +8,7 @@ from ..casting import (float_to_int, shared_range, CastingError, int_to_float, as_int, int_abs, floor_log2, able_int_type, best_float, ulp, longdouble_precision_improved) -from ..testing_pytest import suppress_warnings +from ..testing import suppress_warnings from numpy.testing import (assert_array_almost_equal, assert_array_equal) diff --git a/nibabel/tests/test_deprecator.py b/nibabel/tests/test_deprecator.py index c508795cf9..cf56dd598d 100644 --- a/nibabel/tests/test_deprecator.py +++ b/nibabel/tests/test_deprecator.py @@ -10,7 +10,7 @@ from nibabel.deprecator import (_ensure_cr, _add_dep_doc, ExpiredDeprecationError, Deprecator) -from ..testing_pytest import clear_and_catch_warnings +from ..testing import clear_and_catch_warnings 
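# (A minimal usage sketch of clear_and_catch_warnings, assuming only the
# stdlib `warnings` module; `_OWN_MODULE` is defined just below. On entry the
# context manager clears each listed module's __warningregistry__ and restores
# it on exit, and with nibabel's default record=True it yields the list of
# caught warnings:
#
#     import warnings
#
#     with clear_and_catch_warnings(modules=[_OWN_MODULE]) as w:
#         warnings.simplefilter('always')
#         warnings.warn('probe', DeprecationWarning)
#     assert len(w) == 1
# )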
_OWN_MODULE = sys.modules[__name__] diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py index 9c3a4f5d85..c7c80b0dd9 100644 --- a/nibabel/tests/test_dft.py +++ b/nibabel/tests/test_dft.py @@ -4,7 +4,7 @@ import os from os.path import join as pjoin, dirname from io import BytesIO -from ..testing_pytest import suppress_warnings +from ..testing import suppress_warnings with suppress_warnings(): from .. import dft diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 918cffc52b..a346c71569 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -21,7 +21,7 @@ from numpy.testing import assert_array_equal, assert_array_almost_equal -from ..testing_pytest import data_path, suppress_warnings +from ..testing import data_path, suppress_warnings from ..tmpdirs import InTemporaryDirectory from .test_wrapstruct import _TestWrapStructBase diff --git a/nibabel/tests/test_floating.py b/nibabel/tests/test_floating.py index da5f2b6e2b..e419eb8868 100644 --- a/nibabel/tests/test_floating.py +++ b/nibabel/tests/test_floating.py @@ -9,7 +9,7 @@ int_to_float, floor_log2, type_info, _check_nmant, _check_maxexp, ok_floats, on_powerpc, have_binary128, longdouble_precision_improved) -from ..testing_pytest import suppress_warnings +from ..testing import suppress_warnings import pytest diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index f6245aa594..8af303914b 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -45,8 +45,8 @@ import pytest from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns, assert_allclose -from ..testing_pytest import (bytesio_round_trip, bytesio_filemap, - assert_data_similar, clear_and_catch_warnings) +from ..testing import (bytesio_round_trip, bytesio_filemap, + assert_data_similar, clear_and_catch_warnings) from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError diff --git a/nibabel/tests/test_imageclasses.py b/nibabel/tests/test_imageclasses.py index 8fc0da4908..193cf38cb9 100644 --- a/nibabel/tests/test_imageclasses.py +++ b/nibabel/tests/test_imageclasses.py @@ -16,7 +16,7 @@ from nibabel.imageclasses import spatial_axes_first, class_map, ext_map -from nibabel.testing_pytest import clear_and_catch_warnings +from nibabel.testing import clear_and_catch_warnings DATA_DIR = pjoin(dirname(__file__), 'data') diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index 837957e566..a908ee6ad9 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -25,7 +25,7 @@ from ..tmpdirs import InTemporaryDirectory from ..deprecator import ExpiredDeprecationError -from ..testing_pytest import assert_data_similar, data_path, clear_and_catch_warnings +from ..testing import assert_data_similar, data_path, clear_and_catch_warnings from numpy.testing import assert_array_equal import pytest diff --git a/nibabel/tests/test_minc2.py b/nibabel/tests/test_minc2.py index 5032f01480..2c2f5c6e51 100644 --- a/nibabel/tests/test_minc2.py +++ b/nibabel/tests/test_minc2.py @@ -15,7 +15,7 @@ from ..minc2 import Minc2File, Minc2Image from .._h5py_compat import h5py, have_h5py, setup_module -from ..testing_pytest import data_path +from ..testing import data_path from . 
import test_minc1 as tm2 diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 1bb4e5ae2e..6ee1926d8d 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -30,16 +30,17 @@ from .test_arraywriters import rt_err_estimate, IUINT_TYPES from .test_orientations import ALL_ORNTS from .nibabel_data import get_nibabel_data, needs_nibabel_data -from ..testing_pytest import bytesio_filemap, bytesio_round_trip from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal) -from ..testing_pytest import ( +from ..testing import ( clear_and_catch_warnings, data_path, runif_extra_has, suppress_warnings, + bytesio_filemap, + bytesio_round_trip ) import pytest diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index 7f9d32b6b9..d0daf9632e 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -21,7 +21,7 @@ from numpy.testing import assert_array_equal -from ..testing_pytest import data_path +from ..testing import data_path header_file = os.path.join(data_path, 'nifti2.hdr') image_file = os.path.join(data_path, 'example_nifti2.nii.gz') diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index d9d728046e..43712d11bf 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -20,7 +20,7 @@ from unittest import mock import pytest -from ..testing_pytest import error_warnings +from ..testing import error_warnings class Lunk(object): diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index fe607c1982..4c55179672 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -21,7 +21,7 @@ assert_array_equal) import pytest -from ..testing_pytest import (clear_and_catch_warnings, suppress_warnings, +from ..testing import (clear_and_catch_warnings, suppress_warnings, assert_arr_dict_equal) from .test_arrayproxy import check_mmap diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index f8e25c14cc..a5a9e9e051 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -55,7 +55,7 @@ import pytest from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose -from ..testing_pytest import data_path as DATA_PATH, assert_dt_equal, clear_and_catch_warnings +from ..testing import data_path as DATA_PATH, assert_dt_equal, clear_and_catch_warnings from ..deprecator import ExpiredDeprecationError from ..tmpdirs import InTemporaryDirectory diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index db901adb61..e916770dab 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -13,7 +13,7 @@ from io import BytesIO from ..volumeutils import finite_range, apply_read_scaling, array_to_file, array_from_file from ..casting import type_info -from ..testing_pytest import suppress_warnings +from ..testing import suppress_warnings from .test_volumeutils import _calculate_scale diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index c8c87092bf..1ed6870f07 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -30,7 +30,7 @@ from .test_parrec import (DTI_PAR_BVECS, DTI_PAR_BVALS, EXAMPLE_IMAGES as PARREC_EXAMPLES) from .test_parrec_data import BALLS, AFF_OFF -from ..testing_pytest import assert_data_similar +from ..testing import assert_data_similar def _proc_stdout(stdout): diff --git a/nibabel/tests/test_spatialimages.py 
b/nibabel/tests/test_spatialimages.py index 82da89b3d7..a2a8ddc79d 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -22,7 +22,7 @@ from unittest import TestCase from numpy.testing import assert_array_almost_equal -from ..testing_pytest import ( +from ..testing import ( bytesio_round_trip, clear_and_catch_warnings, suppress_warnings, @@ -654,4 +654,4 @@ class MyHeader(Header): assert len(w) == 0 MyHeader() - assert len(w) == 1 \ No newline at end of file + assert len(w) == 1 diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 3198c43ea7..e84a18ea4f 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -29,7 +29,7 @@ from ..volumeutils import apply_read_scaling, _dt_min_max from ..spatialimages import supported_np_types, HeaderDataError -from ..testing_pytest import ( +from ..testing import ( bytesio_round_trip, bytesio_filemap, assert_allclose_safely, diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 65880c2833..32b77fac6c 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -7,7 +7,7 @@ import numpy as np -from ..testing_pytest import (error_warnings, suppress_warnings, +from ..testing import (error_warnings, suppress_warnings, clear_and_catch_warnings, assert_allclose_safely, get_fresh_mod, assert_re_in, test_data, data_path) import pytest diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index dca5053d74..8b0b6a52cf 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -57,7 +57,7 @@ assert_array_equal) import pytest -from ..testing_pytest import assert_dt_equal, assert_allclose_safely, suppress_warnings +from ..testing import assert_dt_equal, assert_allclose_safely, suppress_warnings #: convenience variables for numpy types FLOAT_TYPES = np.sctypes['float'] diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index 32035db995..883c0ec147 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -34,7 +34,7 @@ from ..spatialimages import HeaderDataError from .. 
import imageglobals -from ..testing_pytest import BaseTestCase +from ..testing import BaseTestCase from numpy.testing import assert_array_equal import pytest From 01172127e2e85b5d3431260b426d6295a44d2441 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 18 Feb 2020 08:27:31 -0500 Subject: [PATCH 656/689] TEST: Remove last imports of nose --- nibabel/nicom/tests/test_dwiparams.py | 14 ++-- nibabel/streamlines/tests/test_streamlines.py | 76 +++++++++---------- .../streamlines/tests/test_tractogram_file.py | 21 ++--- nibabel/streamlines/tests/test_utils.py | 6 +- 4 files changed, 62 insertions(+), 55 deletions(-) diff --git a/nibabel/nicom/tests/test_dwiparams.py b/nibabel/nicom/tests/test_dwiparams.py index 3b02367951..d0d20e574a 100644 --- a/nibabel/nicom/tests/test_dwiparams.py +++ b/nibabel/nicom/tests/test_dwiparams.py @@ -6,10 +6,9 @@ from ..dwiparams import B2q, q2bg -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +import pytest -from numpy.testing import (assert_array_equal, assert_array_almost_equal, - assert_equal as np_assert_equal) +from numpy.testing import (assert_array_almost_equal, assert_equal as np_assert_equal) def test_b2q(): @@ -27,17 +26,20 @@ def test_b2q(): assert_array_almost_equal(-q * s, B2q(B)) # Massive negative eigs B = np.eye(3) * -1 - assert_raises(ValueError, B2q, B) + with pytest.raises(ValueError): + B2q(B) # no error if we up the tolerance q = B2q(B, tol=1) # Less massive negativity, dropping tol B = np.diag([-1e-14, 10., 1]) - assert_raises(ValueError, B2q, B) + with pytest.raises(ValueError): + B2q(B) assert_array_almost_equal(B2q(B, tol=5e-13), [0, 10, 0]) # Confirm that we assume symmetric B = np.eye(3) B[0, 1] = 1e-5 - assert_raises(ValueError, B2q, B) + with pytest.raises(ValueError): + B2q(B) def test_q2bg(): diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 2e537c63f2..9a2f803117 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -10,9 +10,7 @@ from nibabel.tmpdirs import InTemporaryDirectory from numpy.compat.py3k import asbytes -from nibabel.testing import data_path -from nibabel.testing import clear_and_catch_warnings -from nose.tools import assert_equal, assert_raises, assert_true, assert_false +from nibabel.testing import data_path, clear_and_catch_warnings from .test_tractogram import assert_tractogram_equal from ..tractogram import Tractogram, LazyTractogram @@ -82,50 +80,50 @@ def test_is_supported_detect_format(): # Test is_supported and detect_format functions # Empty file/string f = BytesIO() - assert_false(nib.streamlines.is_supported(f)) - assert_false(nib.streamlines.is_supported("")) - assert_true(nib.streamlines.detect_format(f) is None) - assert_true(nib.streamlines.detect_format("") is None) + assert not nib.streamlines.is_supported(f) + assert not nib.streamlines.is_supported("") + assert nib.streamlines.detect_format(f) is None + assert nib.streamlines.detect_format("") is None # Valid file without extension for tfile_cls in FORMATS.values(): f = BytesIO() f.write(asbytes(tfile_cls.MAGIC_NUMBER)) f.seek(0, os.SEEK_SET) - assert_true(nib.streamlines.is_supported(f)) - assert_true(nib.streamlines.detect_format(f) is tfile_cls) + assert nib.streamlines.is_supported(f) + assert nib.streamlines.detect_format(f) is tfile_cls # Wrong extension but right magic number for tfile_cls in FORMATS.values(): with tempfile.TemporaryFile(mode="w+b", suffix=".txt") as f: 
f.write(asbytes(tfile_cls.MAGIC_NUMBER)) f.seek(0, os.SEEK_SET) - assert_true(nib.streamlines.is_supported(f)) - assert_true(nib.streamlines.detect_format(f) is tfile_cls) + assert nib.streamlines.is_supported(f) + assert nib.streamlines.detect_format(f) is tfile_cls # Good extension but wrong magic number for ext, tfile_cls in FORMATS.items(): with tempfile.TemporaryFile(mode="w+b", suffix=ext) as f: f.write(b"pass") f.seek(0, os.SEEK_SET) - assert_false(nib.streamlines.is_supported(f)) - assert_true(nib.streamlines.detect_format(f) is None) + assert not nib.streamlines.is_supported(f) + assert nib.streamlines.detect_format(f) is None # Wrong extension, string only f = "my_tractogram.asd" - assert_false(nib.streamlines.is_supported(f)) - assert_true(nib.streamlines.detect_format(f) is None) + assert not nib.streamlines.is_supported(f) + assert nib.streamlines.detect_format(f) is None # Good extension, string only for ext, tfile_cls in FORMATS.items(): f = "my_tractogram" + ext - assert_true(nib.streamlines.is_supported(f)) - assert_equal(nib.streamlines.detect_format(f), tfile_cls) + assert nib.streamlines.is_supported(f) + assert nib.streamlines.detect_format(f) == tfile_cls # Extension should not be case-sensitive. for ext, tfile_cls in FORMATS.items(): f = "my_tractogram" + ext.upper() - assert_true(nib.streamlines.detect_format(f) is tfile_cls) + assert nib.streamlines.detect_format(f) is tfile_cls class TestLoadSave(unittest.TestCase): @@ -135,12 +133,12 @@ def test_load_empty_file(self): for empty_filename in DATA['empty_filenames']: tfile = nib.streamlines.load(empty_filename, lazy_load=lazy_load) - assert_true(isinstance(tfile, TractogramFile)) + assert isinstance(tfile, TractogramFile) if lazy_load: - assert_true(type(tfile.tractogram), Tractogram) + assert type(tfile.tractogram), Tractogram else: - assert_true(type(tfile.tractogram), LazyTractogram) + assert type(tfile.tractogram), LazyTractogram assert_tractogram_equal(tfile.tractogram, DATA['empty_tractogram']) @@ -150,12 +148,12 @@ def test_load_simple_file(self): for simple_filename in DATA['simple_filenames']: tfile = nib.streamlines.load(simple_filename, lazy_load=lazy_load) - assert_true(isinstance(tfile, TractogramFile)) + assert isinstance(tfile, TractogramFile) if lazy_load: - assert_true(type(tfile.tractogram), Tractogram) + assert type(tfile.tractogram), Tractogram else: - assert_true(type(tfile.tractogram), LazyTractogram) + assert type(tfile.tractogram), LazyTractogram assert_tractogram_equal(tfile.tractogram, DATA['simple_tractogram']) @@ -165,12 +163,12 @@ def test_load_complex_file(self): for complex_filename in DATA['complex_filenames']: tfile = nib.streamlines.load(complex_filename, lazy_load=lazy_load) - assert_true(isinstance(tfile, TractogramFile)) + assert isinstance(tfile, TractogramFile) if lazy_load: - assert_true(type(tfile.tractogram), Tractogram) + assert type(tfile.tractogram), Tractogram else: - assert_true(type(tfile.tractogram), LazyTractogram) + assert type(tfile.tractogram), LazyTractogram tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4)) @@ -191,19 +189,19 @@ def test_save_tractogram_file(self): trk_file = trk.TrkFile(tractogram) # No need for keyword arguments. - assert_raises(ValueError, nib.streamlines.save, - trk_file, "dummy.trk", header={}) + with self.assertRaises(ValueError): + nib.streamlines.save(trk_file, "dummy.trk", header={}) # Wrong extension. 
with clear_and_catch_warnings(record=True, modules=[nib.streamlines]) as w: trk_file = trk.TrkFile(tractogram) - assert_raises(ValueError, nib.streamlines.save, - trk_file, "dummy.tck", header={}) + with self.assertRaises(ValueError): + nib.streamlines.save(trk_file, "dummy.tck", header={}) - assert_equal(len(w), 1) - assert_true(issubclass(w[0].category, ExtensionWarning)) - assert_true("extension" in str(w[0].message)) + assert len(w) == 1 + assert issubclass(w[0].category, ExtensionWarning) + assert "extension" in str(w[0].message) with InTemporaryDirectory(): nib.streamlines.save(trk_file, "dummy.trk") @@ -250,9 +248,9 @@ def test_save_complex_file(self): ((not cls.SUPPORTS_DATA_PER_POINT) + (not cls.SUPPORTS_DATA_PER_STREAMLINE)) - assert_equal(len(w), nb_expected_warnings) + assert len(w) == nb_expected_warnings for i in range(nb_expected_warnings): - assert_true(issubclass(w[i].category, Warning)) + assert issubclass(w[i].category, Warning) tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4)) @@ -281,10 +279,12 @@ def test_save_sliced_tractogram(self): assert_tractogram_equal(tractogram, original_tractogram) def test_load_unknown_format(self): - assert_raises(ValueError, nib.streamlines.load, "") + with self.assertRaises(ValueError): + nib.streamlines.load("") def test_save_unknown_format(self): - assert_raises(ValueError, nib.streamlines.save, Tractogram(), "") + with self.assertRaises(ValueError): + nib.streamlines.save(Tractogram(), "") def test_save_from_generator(self): tractogram = Tractogram(DATA['streamlines'], diff --git a/nibabel/streamlines/tests/test_tractogram_file.py b/nibabel/streamlines/tests/test_tractogram_file.py index da5bce4b3f..2550ecf03d 100644 --- a/nibabel/streamlines/tests/test_tractogram_file.py +++ b/nibabel/streamlines/tests/test_tractogram_file.py @@ -4,7 +4,7 @@ from ..tractogram import Tractogram from ..tractogram_file import TractogramFile -from nose.tools import assert_raises, assert_equal +import pytest def test_subclassing_tractogram_file(): @@ -23,7 +23,8 @@ def load(cls, fileobj, lazy_load=True): def create_empty_header(cls): return None - assert_raises(TypeError, DummyTractogramFile, Tractogram()) + with pytest.raises(TypeError): + DummyTractogramFile(Tractogram()) # Missing 'load' method class DummyTractogramFile(TractogramFile): @@ -38,7 +39,8 @@ def save(self, fileobj): def create_empty_header(cls): return None - assert_raises(TypeError, DummyTractogramFile, Tractogram()) + with pytest.raises(TypeError): + DummyTractogramFile(Tractogram()) # Now we have everything required. class DummyTractogramFile(TractogramFile): @@ -57,12 +59,14 @@ def save(self, fileobj): dtf = DummyTractogramFile(Tractogram()) # Default create_empty_header is empty dict - assert_equal(dtf.header, {}) + assert dtf.header == {} def test_tractogram_file(): - assert_raises(NotImplementedError, TractogramFile.is_correct_format, "") - assert_raises(NotImplementedError, TractogramFile.load, "") + with pytest.raises(NotImplementedError): + TractogramFile.is_correct_format("") + with pytest.raises(NotImplementedError): + TractogramFile.load("") # Testing calling the 'save' method of `TractogramFile` object. 
class DummyTractogramFile(TractogramFile): @@ -78,6 +82,5 @@ def load(cls, fileobj, lazy_load=True): def save(self, fileobj): pass - assert_raises(NotImplementedError, - super(DummyTractogramFile, - DummyTractogramFile(Tractogram)).save, "") + with pytest.raises(NotImplementedError): + super(DummyTractogramFile, DummyTractogramFile(Tractogram)).save("") diff --git a/nibabel/streamlines/tests/test_utils.py b/nibabel/streamlines/tests/test_utils.py index 939ee9bb9e..bcdde6d013 100644 --- a/nibabel/streamlines/tests/test_utils.py +++ b/nibabel/streamlines/tests/test_utils.py @@ -4,7 +4,8 @@ from nibabel.testing import data_path from numpy.testing import assert_array_equal -from nose.tools import assert_raises + +import pytest from ..utils import get_affine_from_reference @@ -17,7 +18,8 @@ def test_get_affine_from_reference(): # Get affine from an numpy array. assert_array_equal(get_affine_from_reference(affine), affine) wrong_ref = np.array([[1, 2, 3], [4, 5, 6]]) - assert_raises(ValueError, get_affine_from_reference, wrong_ref) + with pytest.raises(ValueError): + get_affine_from_reference(wrong_ref) # Get affine from a `SpatialImage`. assert_array_equal(get_affine_from_reference(img), affine) From c9d9eba5ee1e51d53a3398257c9e55b6be12b2c7 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 18 Feb 2020 08:31:21 -0500 Subject: [PATCH 657/689] MNT: Purge nose from requirements, CI --- .azure-pipelines/windows.yml | 18 ------------------ .travis.yml | 21 --------------------- azure-pipelines.yml | 4 ---- dev-requirements.txt | 1 - setup.cfg | 5 ----- 5 files changed, 49 deletions(-) diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index fcdd542223..970a3b788d 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -36,24 +36,6 @@ jobs: python -m pip install .[$(CHECK_TYPE)] SET NIBABEL_DATA_DIR=%CD%\\nibabel-data displayName: 'Install nibabel' - - script: | - mkdir for_testing - cd for_testing - cp ../.coveragerc . - nosetests --with-doctest --with-coverage --cover-package nibabel nibabel ^ - -I test_data ^ - -I test_environment ^ - -I test_euler ^ - -I test_giftiio ^ - -I test_netcdf ^ - -I test_pkg_info ^ - -I test_quaternions ^ - -I test_scaling ^ - -I test_scripts ^ - -I test_spaces ^ - -I test_testing - displayName: 'Nose tests' - condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'nosetests')) - script: | mkdir for_testing cd for_testing diff --git a/.travis.yml b/.travis.yml index 9ca407a757..88bd146c14 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,10 +27,6 @@ python: jobs: include: - # Old nosetests - Remove soon - - python: 3.7 - env: - - CHECK_TYPE="nosetests" # Basic dependencies only - python: 3.5 env: @@ -127,23 +123,6 @@ script: cd doc make html; make doctest; - elif [ "${CHECK_TYPE}" == "nosetests" ]; then - # Change into an innocuous directory and find tests from installation - mkdir for_testing - cd for_testing - cp ../.coveragerc . 
- nosetests --with-doctest --with-coverage --cover-package nibabel nibabel \ - -I test_data \ - -I test_environment \ - -I test_euler \ - -I test_giftiio \ - -I test_netcdf \ - -I test_pkg_info \ - -I test_quaternions \ - -I test_scaling \ - -I test_scripts \ - -I test_spaces \ - -I test_testing elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 2ef2539c74..d09c5b7740 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,7 +34,3 @@ jobs: py38-x64: PYTHON_VERSION: '3.8' PYTHON_ARCH: 'x64' - nosetests: - PYTHON_VERSION: '3.6' - PYTHON_ARCH: 'x64' - CHECK_TYPE: 'nosetests' diff --git a/dev-requirements.txt b/dev-requirements.txt index aa0980c3b4..69302061bc 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,4 +1,3 @@ # Requirements for running tests -r requirements.txt -nose pytest diff --git a/setup.cfg b/setup.cfg index 13edca59ab..fd3e3407e1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,13 +55,8 @@ spm = scipy style = flake8 -nosetests = - coverage - nose >=0.11 - pytest test = coverage - nose >=0.11 pytest !=5.3.4 pytest-cov all = From 1c6ea64ae34caf322b7a89f02346b781a656f454 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 18 Feb 2020 11:23:34 -0500 Subject: [PATCH 658/689] RF: Remove nose test state code --- nibabel/__init__.py | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 2d3428289c..4cdc6018e6 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -36,29 +36,6 @@ For more detailed information see the :ref:`manual`. """ -# Package-wide test setup and teardown -_test_states = { - # Numpy changed print options in 1.14; we can update docstrings and remove - # these when our minimum for building docs exceeds that - 'legacy_printopt': None, - } - -def setup_package(): - """ Set numpy print style to legacy="1.13" for newer versions of numpy """ - import numpy as np - from distutils.version import LooseVersion - if LooseVersion(np.__version__) >= LooseVersion('1.14'): - if _test_states.get('legacy_printopt') is None: - _test_states['legacy_printopt'] = np.get_printoptions().get('legacy') - np.set_printoptions(legacy="1.13") - -def teardown_package(): - """ Reset print options when tests finish """ - import numpy as np - if _test_states.get('legacy_printopt') is not None: - np.set_printoptions(legacy=_test_states.pop('legacy_printopt')) - - # module imports from . import analyze as ana from . import spm99analyze as spm99 From ff922aa988f0ef446a579fc7a0f05653b51de847 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 18 Feb 2020 11:24:26 -0500 Subject: [PATCH 659/689] RF: Reimplement nibabel.test() and nibabel.bench() --- nibabel/__init__.py | 102 ++++++++++++++++++++++-- nibabel/benchmarks/pytest.benchmark.ini | 4 + setup.cfg | 1 + 3 files changed, 102 insertions(+), 5 deletions(-) create mode 100644 nibabel/benchmarks/pytest.benchmark.ini diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 4cdc6018e6..7c096a0033 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -69,13 +69,105 @@ from . import streamlines from . 
import viewers

-from numpy.testing import Tester
-test = Tester().test
-bench = Tester().bench
-del Tester
-
 from .pkg_info import get_pkg_info as _get_pkg_info

 def get_info():
     return _get_pkg_info(os.path.dirname(__file__))
+
+
+def test(label=None, verbose=1, extra_argv=None,
+         doctests=False, coverage=False, raise_warnings=None,
+         timer=False):
+    """
+    Run tests for nibabel using pytest
+
+    The protocol mimics that of ``numpy.testing.NoseTester.test()``.
+    Not all features are currently implemented.
+
+    Parameters
+    ----------
+    label : None
+        Unused.
+    verbose : int, optional
+        Verbosity value for test outputs. Positive values increase verbosity,
+        and negative values decrease it. Default is 1.
+    extra_argv : list, optional
+        List with any extra arguments to pass to pytest.
+    doctests : bool, optional
+        If True, run doctests in module. Default is False.
+    coverage : bool, optional
+        If True, report coverage of nibabel code. Default is False.
+        (This requires the
+        `coverage module `_).
+    raise_warnings : None
+        Unused.
+    timer : False
+        Unused.
+
+    Returns
+    -------
+    code : ExitCode
+        Returns the result of running the tests as a ``pytest.ExitCode`` enum
+    """
+    import pytest
+    args = []
+
+    if label is not None:
+        raise NotImplementedError("Labels cannot be set at present")
+
+    try:
+        verbose = int(verbose)
+    except ValueError:
+        pass
+    else:
+        if verbose > 0:
+            args.append("-" + "v" * verbose)
+        elif verbose < 0:
+            args.append("-" + "q" * -verbose)
+
+    if extra_argv:
+        args.extend(extra_argv)
+    if doctests:
+        args.append("--doctest-modules")
+    if coverage:
+        args.extend(["--cov", "nibabel"])
+    if raise_warnings:
+        raise NotImplementedError("Warning filters are not implemented")
+    if timer:
+        raise NotImplementedError("Timing is not implemented")
+
+    args.extend(["--pyargs", "nibabel"])
+
+    return pytest.main(args=args)
+
+
+def bench(label=None, verbose=1, extra_argv=None):
+    """
+    Run benchmarks for nibabel using pytest
+
+    The protocol mimics that of ``numpy.testing.NoseTester.bench()``.
+    Not all features are currently implemented.
+
+    Parameters
+    ----------
+    label : None
+        Unused.
+    verbose : int, optional
+        Verbosity value for test outputs. Positive values increase verbosity,
+        and negative values decrease it. Default is 1.
+    extra_argv : list, optional
+        List with any extra arguments to pass to pytest.
+
+    Returns
+    -------
+    code : ExitCode
+        Returns the result of running the tests as a ``pytest.ExitCode`` enum
+    """
+    from pkg_resources import resource_filename
+    config = resource_filename("nibabel", "benchmarks/pytest.benchmark.ini")
+    args = []
+    if extra_argv is not None:
+        args.extend(extra_argv)
+    args.extend(["-c", config])
+    return test(label, verbose, extra_argv=args)
diff --git a/nibabel/benchmarks/pytest.benchmark.ini b/nibabel/benchmarks/pytest.benchmark.ini
new file mode 100644
index 0000000000..734e6c7d4c
--- /dev/null
+++ b/nibabel/benchmarks/pytest.benchmark.ini
@@ -0,0 +1,4 @@
+[pytest]
+python_files = bench_*.py
+python_functions = bench_*
+addopts = --capture=no
diff --git a/setup.cfg b/setup.cfg
index fd3e3407e1..a180f71c8d 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -82,6 +82,7 @@ console_scripts =
 nibabel =
     tests/data/*
     */tests/data/*
+    benchmarks/pytest.benchmark.ini

 [flake8]
 max-line-length = 100

From 6dc8f0fecbc204f9a11cea0d2b273e77fca55a59 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 18 Feb 2020 11:27:39 -0500
Subject: [PATCH 660/689] DOC: Replace nose with pytest in the docs

---
 doc/source/devel/advanced_testing.rst | 2 +-
 doc/source/devel/make_release.rst     | 2 +-
 doc/source/installation.rst           | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/doc/source/devel/advanced_testing.rst b/doc/source/devel/advanced_testing.rst
index 0dc365ea1d..77b6522cb1 100644
--- a/doc/source/devel/advanced_testing.rst
+++ b/doc/source/devel/advanced_testing.rst
@@ -25,7 +25,7 @@ Long-running tests
 Long-running tests are not enabled by default, and can be resource-intensive. To run these tests:

 * Set environment variable ``NIPY_EXTRA_TESTS=slow``;
-* Run ``nosetests``.
+* Run ``pytest nibabel``.

 Note that some tests may require a machine with >4GB of RAM.

diff --git a/doc/source/devel/make_release.rst b/doc/source/devel/make_release.rst
index 25db5210b7..6a09d280b2 100644
--- a/doc/source/devel/make_release.rst
+++ b/doc/source/devel/make_release.rst
@@ -79,7 +79,7 @@ Release checklist

 * Make sure all tests pass (from the nibabel root directory)::

-    nosetests --with-doctest nibabel
+    pytest --doctest-modules nibabel

 * Make sure you are set up to use the ``try_branch.py`` - see
   https://github.com/nipy/nibotmi/blob/master/install.rst#trying-a-set-of-changes-on-the-buildbots

diff --git a/doc/source/installation.rst b/doc/source/installation.rst
index ed390578ff..fe02bcdbf2 100644
--- a/doc/source/installation.rst
+++ b/doc/source/installation.rst
@@ -90,7 +90,7 @@ Requirements
 * h5py_ (optional, for MINC2 support)
 * PyDICOM_ 0.9.9 or greater (optional, for DICOM support)
 * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS)
-* nose_ 0.11 or greater and pytest_ (optional, to run the tests)
+* pytest_ (optional, to run the tests)
 * sphinx_ (optional, to build the documentation)

 Get the development sources
@@ -128,7 +128,7 @@ module to see if everything is fine. It should look something like this::

     >>>

-To run the nibabel test suite, from the terminal run ``nosetests nibabel`` or
+To run the nibabel test suite, from the terminal run ``pytest nibabel`` or
 ``python -c "import nibabel; nibabel.test()"``.
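Because ``nibabel.test`` just assembles a pytest command line, any pytest
option can be forwarded through ``extra_argv``. A usage sketch (the ``-k``
expression here is an arbitrary illustration, not a required value)::

    import nibabel as nib

    nib.test(verbose=2, extra_argv=['-k', 'nifti'])  # only tests matching 'nifti'
    nib.bench(extra_argv=['--collect-only'])         # list benchmarks without running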
To run an extended test suite that validates ``nibabel`` for long-running and

From 007925586224fe0fd5243ff1011725adffcc5e8d Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Tue, 18 Feb 2020 11:33:31 -0500
Subject: [PATCH 661/689] DOC: Update benchmark docstrings

---
 nibabel/benchmarks/bench_array_to_file.py      | 8 ++------
 nibabel/benchmarks/bench_arrayproxy_slicing.py | 8 ++------
 nibabel/benchmarks/bench_fileslice.py          | 8 ++------
 nibabel/benchmarks/bench_finite_range.py       | 8 ++------
 nibabel/benchmarks/bench_load_save.py          | 8 ++------
 nibabel/benchmarks/bench_streamlines.py        | 8 ++------
 6 files changed, 12 insertions(+), 36 deletions(-)

diff --git a/nibabel/benchmarks/bench_array_to_file.py b/nibabel/benchmarks/bench_array_to_file.py
index 4908848685..776a93000c 100644
--- a/nibabel/benchmarks/bench_array_to_file.py
+++ b/nibabel/benchmarks/bench_array_to_file.py
@@ -5,13 +5,9 @@
     import nibabel as nib
     nib.bench()

-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also
-run the doctests, let's hope they pass.
+Run this benchmark with::

-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_load_save.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_array_to_file.py
 """

 import sys
diff --git a/nibabel/benchmarks/bench_arrayproxy_slicing.py b/nibabel/benchmarks/bench_arrayproxy_slicing.py
index 7fe79763d0..2ed9ec9ccd 100644
--- a/nibabel/benchmarks/bench_arrayproxy_slicing.py
+++ b/nibabel/benchmarks/bench_arrayproxy_slicing.py
@@ -5,13 +5,9 @@
     import nibabel as nib
     nib.bench()

-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also
-run the doctests, let's hope they pass.
+Run this benchmark with::

-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_arrayproxy_slicing.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_arrayproxy_slicing.py
 """

 from timeit import timeit
diff --git a/nibabel/benchmarks/bench_fileslice.py b/nibabel/benchmarks/bench_fileslice.py
index 764e0390b5..8763784dc6 100644
--- a/nibabel/benchmarks/bench_fileslice.py
+++ b/nibabel/benchmarks/bench_fileslice.py
@@ -3,13 +3,9 @@
     import nibabel as nib
     nib.bench()

-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also
-run the doctests, let's hope they pass.
+Run this benchmark with::

-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_fileslice.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_fileslice.py
 """

 import sys
diff --git a/nibabel/benchmarks/bench_finite_range.py b/nibabel/benchmarks/bench_finite_range.py
index 6aa9d9d861..1ca2bf95d0 100644
--- a/nibabel/benchmarks/bench_finite_range.py
+++ b/nibabel/benchmarks/bench_finite_range.py
@@ -5,13 +5,9 @@
     import nibabel as nib
     nib.bench()

-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also
-run the doctests, let's hope they pass.
+Run this benchmark with::
 
-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_finite_range
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_finite_range.py
 """
 
 import sys
diff --git a/nibabel/benchmarks/bench_load_save.py b/nibabel/benchmarks/bench_load_save.py
index 59198eac1a..46118df43e 100644
--- a/nibabel/benchmarks/bench_load_save.py
+++ b/nibabel/benchmarks/bench_load_save.py
@@ -5,13 +5,9 @@
     import nibabel as nib
     nib.bench()
 
-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also
-run the doctests, let's hope they pass.
+Run this benchmark with::
 
-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_load_save.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_load_save.py
 """
 
 import sys
diff --git a/nibabel/benchmarks/bench_streamlines.py b/nibabel/benchmarks/bench_streamlines.py
index fc1e39f8ad..5c49c9e177 100644
--- a/nibabel/benchmarks/bench_streamlines.py
+++ b/nibabel/benchmarks/bench_streamlines.py
@@ -5,13 +5,9 @@
     import nibabel as nib
     nib.bench()
 
-If you have doctests enabled by default in nose (with a noserc file or
-environment variable), and you have a numpy version <= 1.6.1, this will also run
-the doctests, let's hope they pass.
+Run this benchmark with::
 
-Run this benchmark with:
-
-    nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_streamlines.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_streamlines.py
 """
 
 import numpy as np

From 154b941e58337a11662d527cba79cd6cfc8eb147 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Tue, 18 Feb 2020 12:30:38 -0500
Subject: [PATCH 662/689] TEST: Final few assert_raises

---
 nibabel/cmdline/tests/test_utils.py | 6 ++----
 nibabel/tests/test_wrapstruct.py    | 4 ++--
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/nibabel/cmdline/tests/test_utils.py b/nibabel/cmdline/tests/test_utils.py
index eb864c62c0..460f0d40d6 100644
--- a/nibabel/cmdline/tests/test_utils.py
+++ b/nibabel/cmdline/tests/test_utils.py
@@ -5,8 +5,6 @@
 Test running scripts
 """
 
-from numpy.testing import assert_raises
-
 import pytest
 
 import nibabel as nib
@@ -196,10 +194,10 @@ def test_main():
                                                 -7.24879837e+00]).astype(dtype="float32")]),
                        ('DATA(md5)', ['0a2576dd6badbb25bfb3b12076df986b',
                                       'b0abbc492b4fd533b2c80d82570062cf'])])
-    with assert_raises(SystemExit):
+    with pytest.raises(SystemExit):
         np.testing.assert_equal(main(test_names, StringIO()), expected_difference)
 
     test_names_2 = [pjoin(data_path, f) for f in ('standard.nii.gz', 'standard.nii.gz')]
-    with assert_raises(SystemExit):
+    with pytest.raises(SystemExit):
         assert main(test_names_2, StringIO()) == "These files are identical."
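
The ``test_utils.py`` hunk above uses ``pytest.raises`` as a context manager,
while the ``test_wrapstruct.py`` hunk below keeps the callable form, because
the exception and callable arrive pre-packed in a ``raiser`` tuple. Both
spellings are supported pytest API; a minimal self-contained sketch (the
``must_be_positive`` helper is hypothetical, for illustration only)::

    import pytest

    def must_be_positive(x):
        # Hypothetical helper that raises for non-positive input.
        if x <= 0:
            raise ValueError("x must be positive")
        return x

    # Context-manager form: the preferred, more readable spelling.
    with pytest.raises(ValueError):
        must_be_positive(-1)

    # Callable form, mirroring nose's assert_raises(Error, func, *args);
    # handy when an existing (Error, func, *args) tuple can be star-unpacked.
    raiser = (ValueError, must_be_positive, -1)
    pytest.raises(*raiser)
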
diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index 883c0ec147..d56873d414 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -476,14 +476,14 @@ def test_log_checks(self): assert fhdr['an_integer'] == 1 assert (message == 'an_integer should be 1; set an_integer to 1') - assert_raises(*raiser) + pytest.raises(*raiser) # lower case string hdr = HC() hdr['a_str'] = 'Hello' # severity = 20 fhdr, message, raiser = self.log_chk(hdr, 20) assert (message == 'a_str should be lower case; ' 'set a_str to lower case') - assert_raises(*raiser) + pytest.raises(*raiser) def test_logger_error(self): # Check that we can reset the logger and error level From f043012dc7edd34830cc89cc2ce1d736e2ea74d7 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 18 Feb 2020 12:24:57 -0500 Subject: [PATCH 663/689] CI: Add OSX test to Travis --- .travis.yml | 39 ++++++++++++++++++++++++++------------- 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 622a19db32..ebbb67904d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -57,15 +57,28 @@ matrix: - python: 3.7 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" + # OSX Python support is basically accidental. Take whatever version we can + # get and test with full dependencies... + - os: osx + language: minimal + # and pre-releases. No min-requirements.txt because we can't assume a wheel that old. + - os: osx + language: minimal + env: + - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" + # Test that PyPI installs from source pass - python: 3.5 env: - INSTALL_TYPE=sdist + # Wheels (binary distributions) - python: 3.5 env: - INSTALL_TYPE=wheel + # Install from git archive (e.g., https://github.com/nipy/nibabel/archive/master.zip) - python: 3.5 env: - INSTALL_TYPE=archive + # Run flake8... 
Might not be needed now we have pep8speaks - python: 3.5 env: - CHECK_TYPE="style" @@ -76,42 +89,42 @@ matrix: # Set up virtual environment, build package, build from depends before_install: - - travis_retry python -m pip install --upgrade pip virtualenv - - virtualenv --python=python venv + - travis_retry python3 -m pip install --upgrade pip virtualenv + - virtualenv --python=python3 venv - source venv/bin/activate - - python --version # just to check - - travis_retry pip install -U $SETUP_REQUIRES + - python3 --version # just to check + - travis_retry python3 -m pip install -U $SETUP_REQUIRES - | if [ "$INSTALL_TYPE" == "sdist" ]; then - python setup.py egg_info # check egg_info while we're here - python setup.py sdist + python3 setup.py egg_info # check egg_info while we're here + python3 setup.py sdist export ARCHIVE=$( ls dist/*.tar.gz ) elif [ "$INSTALL_TYPE" == "wheel" ]; then - python setup.py bdist_wheel + python3 setup.py bdist_wheel export ARCHIVE=$( ls dist/*.whl ) elif [ "$INSTALL_TYPE" == "archive" ]; then export ARCHIVE="package.tar.gz" git archive -o $ARCHIVE HEAD fi - - if [ -n "$DEPENDS" ]; then pip install $EXTRA_PIP_FLAGS $DEPENDS; fi + - if [ -n "$DEPENDS" ]; then python3 -m pip install $EXTRA_PIP_FLAGS $DEPENDS; fi # command to install dependencies install: - | if [ "$INSTALL_TYPE" == "setup" ]; then - python setup.py install + python3 setup.py install else - pip install $EXTRA_PIP_FLAGS $ARCHIVE + python3 -m pip install $EXTRA_PIP_FLAGS $ARCHIVE fi # Basic import check - - python -c 'import nibabel; print(nibabel.__version__)' + - python3 -c 'import nibabel; print(nibabel.__version__)' - if [ "$CHECK_TYPE" == "skiptests" ]; then exit 0; fi before_script: # Point to nibabel data directory - export NIBABEL_DATA_DIR="$PWD/nibabel-data" # Because nibabel is already installed, will just look up the extra - - pip install $EXTRA_PIP_FLAGS "nibabel[$CHECK_TYPE]" + - python3 -m pip install $EXTRA_PIP_FLAGS "nibabel[$CHECK_TYPE]" # command to run tests, e.g. python setup.py test script: @@ -134,7 +147,7 @@ script: fi after_script: - - travis_retry pip install codecov + - travis_retry python3 -m pip install codecov - codecov notifications: From c2a8b52bc51d7b1b080fc31d3f9ab256a3299bd1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 19 Feb 2020 14:40:59 -0500 Subject: [PATCH 664/689] CI: Fix wheel build... 
weird pip behavior while in source directory --- .travis.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index ebbb67904d..4e8e1051f1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -94,6 +94,8 @@ before_install: - source venv/bin/activate - python3 --version # just to check - travis_retry python3 -m pip install -U $SETUP_REQUIRES + - which python3 + - which pip - | if [ "$INSTALL_TYPE" == "sdist" ]; then python3 setup.py egg_info # check egg_info while we're here @@ -106,7 +108,7 @@ before_install: export ARCHIVE="package.tar.gz" git archive -o $ARCHIVE HEAD fi - - if [ -n "$DEPENDS" ]; then python3 -m pip install $EXTRA_PIP_FLAGS $DEPENDS; fi + - if [ -n "$DEPENDS" ]; then pip install $EXTRA_PIP_FLAGS $DEPENDS; fi # command to install dependencies install: @@ -114,7 +116,7 @@ install: if [ "$INSTALL_TYPE" == "setup" ]; then python3 setup.py install else - python3 -m pip install $EXTRA_PIP_FLAGS $ARCHIVE + pip install $EXTRA_PIP_FLAGS $ARCHIVE fi # Basic import check - python3 -c 'import nibabel; print(nibabel.__version__)' From 22d8c65ff0b01ca7a1c398eed7915eb2968e0bef Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 19 Feb 2020 16:36:59 -0500 Subject: [PATCH 665/689] FIX: Return pytest.main results from nibabel.test/bench --- nibabel/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 7c096a0033..32e2fa7009 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -139,7 +139,7 @@ def test(label=None, verbose=1, extra_argv=None, args.extend(["--pyargs", "nibabel"]) - pytest.main(args=args) + return pytest.main(args=args) def bench(label=None, verbose=1, extra_argv=None): @@ -170,4 +170,4 @@ def bench(label=None, verbose=1, extra_argv=None): if extra_argv is not None: args.extend(extra_argv) args.extend(["-c", config]) - test(label, verbose, extra_argv=args) + return test(label, verbose, extra_argv=args) From 49be50a6053d954caf9a599be00e418accd45426 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 19 Feb 2020 20:07:38 -0500 Subject: [PATCH 666/689] TEST: Might as well test the testers. Found a bug. 
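
The bug: ``nibabel.test`` gated its ``raise_warnings`` stub on truthiness, so
an explicit-but-empty value such as ``raise_warnings=[]`` slipped through
silently instead of raising ``NotImplementedError``; the ``verbose`` handling
likewise swallowed unusable values instead of raising ``ValueError``. Both
fixes are visible in the ``nibabel/__init__.py`` hunk below. The new tests
lean on mocking ``pytest.main`` so that no test run is actually launched; a
minimal sketch of that pattern (the argument values are illustrative only)::

    from unittest import mock

    import nibabel as nib

    # Intercept pytest.main so nothing executes, then inspect the
    # argument list that nibabel.test() assembled.
    with mock.patch("pytest.main") as pytest_main:
        nib.test(verbose=1, doctests=False, coverage=False)

    args, kwargs = pytest_main.call_args
    assert args == ()
    assert kwargs == {"args": ["-v", "--pyargs", "nibabel"]}
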
---
 nibabel/__init__.py        | 16 ++++-------
 nibabel/tests/test_init.py | 60 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 66 insertions(+), 10 deletions(-)
 create mode 100644 nibabel/tests/test_init.py

diff --git a/nibabel/__init__.py b/nibabel/__init__.py
index 32e2fa7009..f99e9e0b06 100644
--- a/nibabel/__init__.py
+++ b/nibabel/__init__.py
@@ -116,15 +116,11 @@ def test(label=None, verbose=1, extra_argv=None,
     if label is not None:
         raise NotImplementedError("Labels cannot be set at present")
 
-    try:
-        verbose = int(verbose)
-    except ValueError:
-        pass
-    else:
-        if verbose > 0:
-            args.append("-" + "v" * verbose)
-        elif verbose < 0:
-            args.append("-" + "q" * -verbose)
+    verbose = int(verbose)
+    if verbose > 0:
+        args.append("-" + "v" * verbose)
+    elif verbose < 0:
+        args.append("-" + "q" * -verbose)
 
     if extra_argv:
         args.extend(extra_argv)
@@ -132,7 +128,7 @@
         args.append("--doctest-modules")
     if coverage:
         args.extend(["--cov", "nibabel"])
-    if raise_warnings:
+    if raise_warnings is not None:
         raise NotImplementedError("Warning filters are not implemented")
     if timer:
         raise NotImplementedError("Timing is not implemented")
diff --git a/nibabel/tests/test_init.py b/nibabel/tests/test_init.py
new file mode 100644
index 0000000000..97f440497e
--- /dev/null
+++ b/nibabel/tests/test_init.py
@@ -0,0 +1,60 @@
+import nibabel as nib
+from pkg_resources import resource_filename
+import pytest
+import unittest
+from unittest import mock
+
+@pytest.mark.parametrize("verbose, v_args", [(-2, ["-qq"]),
+                                             (-1, ["-q"]),
+                                             (0, []),
+                                             (1, ["-v"]),
+                                             (2, ["-vv"])])
+@pytest.mark.parametrize("doctests", (True, False))
+@pytest.mark.parametrize("coverage", (True, False))
+def test_nibabel_test(verbose, v_args, doctests, coverage):
+    expected_args = v_args + ["--doctest-modules", "--cov", "nibabel", "--pyargs", "nibabel"]
+    if not doctests:
+        expected_args.remove("--doctest-modules")
+    if not coverage:
+        expected_args[-4:-2] = []
+
+    with mock.patch("pytest.main") as pytest_main:
+        nib.test(verbose=verbose, doctests=doctests, coverage=coverage)
+
+    args, kwargs = pytest_main.call_args
+    assert args == ()
+    assert kwargs == {"args": expected_args}
+
+
+def test_nibabel_test_errors():
+    with pytest.raises(NotImplementedError):
+        nib.test(label="fast")
+    with pytest.raises(NotImplementedError):
+        nib.test(raise_warnings=[])
+    with pytest.raises(NotImplementedError):
+        nib.test(timer=True)
+    with pytest.raises(ValueError):
+        nib.test(verbose="-v")
+
+
+def test_nibabel_bench():
+    expected_args = ["-c", "--pyargs", "nibabel"]
+
+    try:
+        expected_args.insert(1, resource_filename("nibabel", "benchmarks/pytest.benchmark.ini"))
+    except Exception:
+        raise unittest.SkipTest("Not installed")
+
+    with mock.patch("pytest.main") as pytest_main:
+        nib.bench(verbose=0)
+
+    args, kwargs = pytest_main.call_args
+    assert args == ()
+    assert kwargs == {"args": expected_args}
+
+    with mock.patch("pytest.main") as pytest_main:
+        nib.bench(verbose=0, extra_argv=[])
+
+    args, kwargs = pytest_main.call_args
+    assert args == ()
+    assert kwargs == {"args": expected_args}

From c049a4e09ea0b3ffbb68174dc473a019bfe0360a Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Feb 2020 20:11:06 -0500
Subject: [PATCH 667/689] CI: Install test requirements through extra dependencies

---
 .azure-pipelines/windows.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml
index 970a3b788d..8b7e990fe2 100644
--- a/.azure-pipelines/windows.yml
+++ 
b/.azure-pipelines/windows.yml
@@ -30,7 +30,6 @@ jobs:
     displayName: 'Update build tools'
   - script: |
       python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS%
-      python -m pip install nose coverage codecov pytest pytest-cov
     displayName: 'Install dependencies'
   - script: |
       python -m pip install .[$(CHECK_TYPE)]
@@ -44,6 +43,7 @@ jobs:
     displayName: 'Pytest tests'
     condition: and(succeeded(), eq(variables['CHECK_TYPE'], 'test'))
   - script: |
+      python -m pip install codecov
       cd for_testing
       codecov
     displayName: 'Upload To Codecov'

From 592b31049b7140181dd25cb6e1f2ce22e87afac0 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Feb 2020 20:12:03 -0500
Subject: [PATCH 668/689] DOC: Specify current benchmark file

---
 nibabel/benchmarks/bench_array_to_file.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/benchmarks/bench_array_to_file.py b/nibabel/benchmarks/bench_array_to_file.py
index 776a93000c..ee0d25044d 100644
--- a/nibabel/benchmarks/bench_array_to_file.py
+++ b/nibabel/benchmarks/bench_array_to_file.py
@@ -7,7 +7,7 @@
 
 Run this benchmark with::
 
-    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_load_save.py
+    pytest -c <path>/benchmarks/pytest.benchmark.ini <path>/benchmarks/bench_array_to_file.py
 """
 
 import sys

From 7392e8fadf57f90f92adacbe0e93c4054670016c Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Feb 2020 23:30:28 -0500
Subject: [PATCH 669/689] TEST: Style and parameterization fixes for readability

---
 nibabel/cifti2/tests/test_cifti2io_header.py |   2 +-
 nibabel/tests/test_arraywriters.py           |   6 +-
 nibabel/tests/test_files_interface.py        |   2 +-
 nibabel/tests/test_image_load_save.py        |  35 +---
 nibabel/tests/test_nifti1.py                 | 171 ++++++++++---------
 nibabel/tests/test_nifti2.py                 |  10 +-
 nibabel/tests/test_optpkg.py                 |   5 +-
 nibabel/tests/test_orientations.py           |  30 ++--
 nibabel/tests/test_parrec.py                 |   6 +-
 nibabel/tests/test_processing.py             |  24 +--
 nibabel/tests/test_proxy_api.py              |   8 +-
 nibabel/tests/test_scaling.py                |  23 +--
 nibabel/tests/test_scripts.py                |   3 +-
 nibabel/tests/test_spatialimages.py          |   3 +-
 nibabel/tests/test_testing.py                |   5 +-
 nibabel/tests/test_tripwire.py               |   2 +-
 nibabel/tests/test_wrapstruct.py             |  19 +--
 17 files changed, 153 insertions(+), 201 deletions(-)

diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py
index 0cbf167809..0fef5ccd78 100644
--- a/nibabel/cifti2/tests/test_cifti2io_header.py
+++ b/nibabel/cifti2/tests/test_cifti2io_header.py
@@ -255,7 +255,7 @@ def test_read_geometry():
                        [ 0,  0,  0,  1]]
     expected_dimensions = (91, 109, 91)
     assert (geometry_mapping.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix ==
-           expected_affine).all()
+            expected_affine).all()
     assert geometry_mapping.volume.volume_dimensions == expected_dimensions
 
 
diff --git a/nibabel/tests/test_arraywriters.py b/nibabel/tests/test_arraywriters.py
index 380126d4d4..9268c3fe36 100644
--- a/nibabel/tests/test_arraywriters.py
+++ b/nibabel/tests/test_arraywriters.py
@@ -610,12 +610,12 @@ def test_dumber_writers():
     aw.slope = 2.0
     assert aw.slope == 2.0
     with pytest.raises(AttributeError):
-        getattr(aw, 'inter')
+        aw.inter
     aw = ArrayWriter(arr)
     with pytest.raises(AttributeError):
-        getattr(aw, 'slope')
+        aw.slope
     with pytest.raises(AttributeError):
-        getattr(aw, 'inter')
+        aw.inter
     # Attempt at scaling should raise error for dumb type
     with pytest.raises(WriterError):
         ArrayWriter(arr, np.int16)
diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py
index da91aaf03a..d3c895618e 
100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -57,7 +57,7 @@ def test_files_interface(): assert img.get_filename() == 'test.nii' assert img.file_map['image'].filename == 'test.nii' with pytest.raises(KeyError): - img.file_map.__getitem__('header') + img.file_map['header'] # pair - note new class img = Nifti1Pair(arr, aff) img.set_filename('test') diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 25c5b3e1de..429144108c 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -137,8 +137,7 @@ def test_save_load(): del re_img2 spm99.save(img, sifn) re_img3 = nils.load(sifn) - assert isinstance(re_img3, - spm99.Spm99AnalyzeImage) + assert isinstance(re_img3, spm99.Spm99AnalyzeImage) assert_array_equal(re_img3.get_fdata(), data) assert_array_equal(re_img3.affine, affine) ni1.save(re_img3, nifn) @@ -301,30 +300,14 @@ def wat(hdr): def test_guessed_image_type(): # Test whether we can guess the image type from example files - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'example4d.nii.gz')) == - Nifti1Image) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'nifti1.hdr')) == - Nifti1Pair) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'example_nifti2.nii.gz')) == - Nifti2Image) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'nifti2.hdr')) == - Nifti2Pair) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'tiny.mnc')) == - Minc1Image) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'small.mnc')) == - Minc2Image) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'test.mgz')) == - MGHImage) - assert (nils.guessed_image_type( - pjoin(DATA_PATH, 'analyze.hdr')) == - Spm2AnalyzeImage) + assert nils.guessed_image_type(pjoin(DATA_PATH, 'example4d.nii.gz')) == Nifti1Image + assert nils.guessed_image_type(pjoin(DATA_PATH, 'nifti1.hdr')) == Nifti1Pair + assert nils.guessed_image_type(pjoin(DATA_PATH, 'example_nifti2.nii.gz')) == Nifti2Image + assert nils.guessed_image_type(pjoin(DATA_PATH, 'nifti2.hdr')) == Nifti2Pair + assert nils.guessed_image_type(pjoin(DATA_PATH, 'tiny.mnc')) == Minc1Image + assert nils.guessed_image_type(pjoin(DATA_PATH, 'small.mnc')) == Minc2Image + assert nils.guessed_image_type(pjoin(DATA_PATH, 'test.mgz')) == MGHImage + assert nils.guessed_image_type(pjoin(DATA_PATH, 'analyze.hdr')) == Spm2AnalyzeImage def test_fail_save(): diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 6ee1926d8d..75f261c48e 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -62,43 +62,7 @@ A[:3, :3] = np.array(R) * Z # broadcasting does the job A[:3, 3] = T -HDE = HeaderDataError -header_examples_list = \ - [ - ((None, None), None, (None, None), (np.nan, np.nan)), - ((np.nan, None), None, (None, None), (np.nan, np.nan)), - ((None, np.nan), None, (None, None), (np.nan, np.nan)), - ((np.nan, np.nan), None, (None, None), (np.nan, np.nan)), - # Can only be one null - ((None, 0), HDE, (None, None), (np.nan, 0)), - ((np.nan, 0), HDE, (None, None), (np.nan, 0)), - ((1, None), HDE, (None, None), (1, np.nan)), - ((1, np.nan), HDE, (None, None), (1, np.nan)), - # Bad slope plus anything generates an error - ((0, 0), HDE, (None, None), (0, 0)), - ((0, None), HDE, (None, None), (0, np.nan)), - ((0, np.nan), HDE, (None, None), (0, np.nan)), - ((0, np.inf), HDE, (None, None), (0, np.inf)), - ((0, -np.inf), HDE, (None, None), (0, -np.inf)), - ((np.inf, 0), HDE, (None, None), (np.inf, 0)), - 
((np.inf, None), HDE, (None, None), (np.inf, np.nan)), - ((np.inf, np.nan), HDE, (None, None), (np.inf, np.nan)), - ((np.inf, np.inf), HDE, (None, None), (np.inf, np.inf)), - ((np.inf, -np.inf), HDE, (None, None), (np.inf, -np.inf)), - ((-np.inf, 0), HDE, (None, None), (-np.inf, 0)), - ((-np.inf, None), HDE, (None, None), (-np.inf, np.nan)), - ((-np.inf, np.nan), HDE, (None, None), (-np.inf, np.nan)), - ((-np.inf, np.inf), HDE, (None, None), (-np.inf, np.inf)), - ((-np.inf, -np.inf), HDE, (None, None), (-np.inf, -np.inf)), - # Good slope and bad inter generates error for get_slope_inter - ((2, None), HDE, HDE, (2, np.nan)), - ((2, np.nan), HDE, HDE, (2, np.nan)), - ((2, np.inf), HDE, HDE, (2, np.inf)), - ((2, -np.inf), HDE, HDE, (2, -np.inf)), - # Good slope and inter - you guessed it - ((2, 0), None, (2, 0), (2, 0)), - ((2, 1), None, (2, 1), (2, 1)) - ] + class TestNifti1PairHeader(tana.TestAnalyzeHeader, tspm.HeaderScalingMixin): header_class = Nifti1PairHeader example_file = header_file @@ -174,9 +138,44 @@ def test_big_scaling(self): def test_slope_inter(self): hdr = self.header_class() + nan, inf, minf = np.nan, np.inf, -np.inf + HDE = HeaderDataError assert hdr.get_slope_inter() == (1.0, 0.0) - for in_tup, exp_err, out_tup, raw_values in header_examples_list: + for in_tup, exp_err, out_tup, raw_values in ( # Null scalings + ((None, None), None, (None, None), (nan, nan)), + ((nan, None), None, (None, None), (nan, nan)), + ((None, nan), None, (None, None), (nan, nan)), + ((nan, nan), None, (None, None), (nan, nan)), + # Can only be one null + ((None, 0), HDE, (None, None), (nan, 0)), + ((nan, 0), HDE, (None, None), (nan, 0)), + ((1, None), HDE, (None, None), (1, nan)), + ((1, nan), HDE, (None, None), (1, nan)), + # Bad slope plus anything generates an error + ((0, 0), HDE, (None, None), (0, 0)), + ((0, None), HDE, (None, None), (0, nan)), + ((0, nan), HDE, (None, None), (0, nan)), + ((0, inf), HDE, (None, None), (0, inf)), + ((0, minf), HDE, (None, None), (0, minf)), + ((inf, 0), HDE, (None, None), (inf, 0)), + ((inf, None), HDE, (None, None), (inf, nan)), + ((inf, nan), HDE, (None, None), (inf, nan)), + ((inf, inf), HDE, (None, None), (inf, inf)), + ((inf, minf), HDE, (None, None), (inf, minf)), + ((minf, 0), HDE, (None, None), (minf, 0)), + ((minf, None), HDE, (None, None), (minf, nan)), + ((minf, nan), HDE, (None, None), (minf, nan)), + ((minf, inf), HDE, (None, None), (minf, inf)), + ((minf, minf), HDE, (None, None), (minf, minf)), + # Good slope and bad inter generates error for get_slope_inter + ((2, None), HDE, HDE, (2, nan)), + ((2, nan), HDE, HDE, (2, nan)), + ((2, inf), HDE, HDE, (2, inf)), + ((2, minf), HDE, HDE, (2, minf)), + # Good slope and inter - you guessed it + ((2, 0), None, (2, 0), (2, 0)), + ((2, 1), None, (2, 1), (2, 1))): hdr = self.header_class() if not exp_err is None: with pytest.raises(exp_err): @@ -193,8 +192,7 @@ def test_slope_inter(self): # Check set survives through checking hdr = self.header_class.from_header(hdr, check=True) assert hdr.get_slope_inter() == out_tup - assert_array_equal([hdr['scl_slope'], hdr['scl_inter']], - raw_values) + assert_array_equal([hdr['scl_slope'], hdr['scl_inter']], raw_values) def test_nifti_qfac_checks(self): # Test qfac is 1 or -1 @@ -208,9 +206,7 @@ def test_nifti_qfac_checks(self): hdr['pixdim'][0] = 0 fhdr, message, raiser = self.log_chk(hdr, 20) assert fhdr['pixdim'][0] == 1 - assert (message == - 'pixdim[0] (qfac) should be 1 ' - '(default) or -1; setting qfac to 1') + assert message == 'pixdim[0] (qfac) should be 1 
(default) or -1; setting qfac to 1' def test_nifti_qsform_checks(self): # qform, sform checks @@ -220,14 +216,12 @@ def test_nifti_qsform_checks(self): hdr['qform_code'] = -1 fhdr, message, raiser = self.log_chk(hdr, 30) assert fhdr['qform_code'] == 0 - assert (message == - 'qform_code -1 not valid; setting to 0') + assert message == 'qform_code -1 not valid; setting to 0' hdr = HC() hdr['sform_code'] = -1 fhdr, message, raiser = self.log_chk(hdr, 30) assert fhdr['sform_code'] == 0 - assert (message == - 'sform_code -1 not valid; setting to 0') + assert message == 'sform_code -1 not valid; setting to 0' def test_nifti_xform_codes(self): # Verify that all xform codes can be set in both qform and sform @@ -254,8 +248,8 @@ def test_magic_offset_checks(self): fhdr, message, raiser = self.log_chk(hdr, 45) assert fhdr['magic'] == b'ooh' assert (message == - 'magic string "ooh" is not valid; ' - 'leaving as is, but future errors are likely') + 'magic string "ooh" is not valid; ' + 'leaving as is, but future errors are likely') # For pairs, any offset is OK, but should be divisible by 16 # Singles need offset of at least 352 (nifti1) or 540 (nifti2) bytes, # with the divide by 16 rule @@ -271,18 +265,18 @@ def test_magic_offset_checks(self): fhdr, message, raiser = self.log_chk(hdr, 30) assert fhdr['vox_offset'] == bad_spm assert (message == - 'vox offset (={0:g}) not divisible by 16, ' - 'not SPM compatible; leaving at current ' - 'value'.format(bad_spm)) + 'vox offset (={0:g}) not divisible by 16, ' + 'not SPM compatible; leaving at current ' + 'value'.format(bad_spm)) # Check minimum offset (if offset set) hdr['magic'] = hdr.single_magic hdr['vox_offset'] = 10 fhdr, message, raiser = self.log_chk(hdr, 40) assert fhdr['vox_offset'] == hdr.single_vox_offset assert (message == - 'vox offset 10 too low for single ' - 'file nifti1; setting to minimum value ' - 'of ' + str(hdr.single_vox_offset)) + 'vox offset 10 too low for single ' + 'file nifti1; setting to minimum value ' + 'of ' + str(hdr.single_vox_offset)) def test_freesurfer_large_vector_hack(self): # For large vector images, Freesurfer appears to set dim[1] to -1 and @@ -315,10 +309,14 @@ def test_freesurfer_large_vector_hack(self): assert hdr.get_data_shape() == (too_big, 1, 1, 4) assert_array_equal(hdr['dim'][:5], np.array([4, -1, 1, 1, 4])) # This only works when the first 3 dimensions are -1, 1, 1 - for args in [(too_big,), (too_big, 1), (too_big, 1, 2), (too_big, 2, 1), - (1, too_big), (1, too_big, 1), (1, 1, too_big), (1, 1, 1, too_big)]: - with pytest.raises(HeaderDataError): - hdr.set_data_shape(args) + pytest.raises(HeaderDataError, hdr.set_data_shape, (too_big,)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (too_big, 1)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (too_big, 1, 2)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (too_big, 2, 1)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, too_big)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, too_big, 1)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, 1, too_big)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, 1, 1, too_big)) # Outside range of glmin raises error far_too_big = int(np.iinfo(glmin).max) + 1 with suppress_warnings(): @@ -350,14 +348,14 @@ def test_freesurfer_ico7_hack(self): assert hdr.get_data_shape() == full_shape[:dim] assert_array_equal(hdr._structarr['dim'], expected_dim) # Only works on dimensions >= 3 - for args in [ - # Only works on dimensions >= 3 - full_shape[:1], full_shape[:2], - # Bad 
shapes - (163842, 2, 1), (163842, 1, 2), (1, 163842, 1), - (1, 1, 163842), (1, 1, 1, 163842)]: - with pytest.raises(HeaderDataError): - hdr.set_data_shape(args) + pytest.raises(HeaderDataError, hdr.set_data_shape, full_shape[:1]) + pytest.raises(HeaderDataError, hdr.set_data_shape, full_shape[:2]) + # Bad shapes + pytest.raises(HeaderDataError, hdr.set_data_shape, (163842, 2, 1)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (163842, 1, 2)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, 163842, 1)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, 1, 163842)) + pytest.raises(HeaderDataError, hdr.set_data_shape, (1, 1, 1, 163842)) # Test consistency of data in .mgh and mri_convert produced .nii nitest_path = os.path.join(get_nibabel_data(), 'nitest-freesurfer') mgh = mghload(os.path.join(nitest_path, 'fsaverage', 'surf', @@ -538,26 +536,26 @@ def test_slice_times(self): # The following examples are from the nifti1.h documentation. hdr['slice_code'] = slice_order_codes['sequential increasing'] assert (_print_me(hdr.get_slice_times()) == - ['0.0', '0.1', '0.2', '0.3', '0.4', '0.5', '0.6']) + ['0.0', '0.1', '0.2', '0.3', '0.4', '0.5', '0.6']) hdr['slice_start'] = 1 hdr['slice_end'] = 5 assert (_print_me(hdr.get_slice_times()) == - [None, '0.0', '0.1', '0.2', '0.3', '0.4', None]) + [None, '0.0', '0.1', '0.2', '0.3', '0.4', None]) hdr['slice_code'] = slice_order_codes['sequential decreasing'] assert (_print_me(hdr.get_slice_times()) == - [None, '0.4', '0.3', '0.2', '0.1', '0.0', None]) + [None, '0.4', '0.3', '0.2', '0.1', '0.0', None]) hdr['slice_code'] = slice_order_codes['alternating increasing'] assert (_print_me(hdr.get_slice_times()) == - [None, '0.0', '0.3', '0.1', '0.4', '0.2', None]) + [None, '0.0', '0.3', '0.1', '0.4', '0.2', None]) hdr['slice_code'] = slice_order_codes['alternating decreasing'] assert (_print_me(hdr.get_slice_times()) == - [None, '0.2', '0.4', '0.1', '0.3', '0.0', None]) + [None, '0.2', '0.4', '0.1', '0.3', '0.0', None]) hdr['slice_code'] = slice_order_codes['alternating increasing 2'] assert (_print_me(hdr.get_slice_times()) == - [None, '0.2', '0.0', '0.3', '0.1', '0.4', None]) + [None, '0.2', '0.0', '0.3', '0.1', '0.4', None]) hdr['slice_code'] = slice_order_codes['alternating decreasing 2'] assert (_print_me(hdr.get_slice_times()) == - [None, '0.4', '0.1', '0.3', '0.0', '0.2', None]) + [None, '0.4', '0.1', '0.3', '0.0', '0.2', None]) # test set hdr = self.header_class() hdr.set_dim_info(slice=2) @@ -608,8 +606,7 @@ def test_slice_times(self): def test_intents(self): ehdr = self.header_class() ehdr.set_intent('t test', (10,), name='some score') - assert (ehdr.get_intent() == - ('t test', (10.0,), 'some score')) + assert ehdr.get_intent() == ('t test', (10.0,), 'some score') # unknown intent name or code - unknown name will fail even when # allow_unknown=True with pytest.raises(KeyError): @@ -636,8 +633,7 @@ def test_intents(self): assert ehdr.get_intent() == ('unknown code 9999', (), '') assert ehdr.get_intent('code') == (9999, (), '') ehdr.set_intent(9999, name='custom intent', allow_unknown=True) - assert (ehdr.get_intent() == - ('unknown code 9999', (), 'custom intent')) + assert ehdr.get_intent() == ('unknown code 9999', (), 'custom intent') assert ehdr.get_intent('code') == (9999, (), 'custom intent') # store unknown intent with parameters. 
set_intent will set the # parameters, but get_intent won't return them @@ -655,10 +651,16 @@ def test_set_slice_times(self): hdr.set_dim_info(slice=2) hdr.set_data_shape([1, 1, 7]) hdr.set_slice_duration(0.1) - for times in [[0] * 6, [None] * 7, [None, 0, 1, None, 3, 4, None], - [None, 0, 1, 2.1, 3, 4, None], [None, 0, 4, 3, 2, 1, None]]: - with pytest.raises(HeaderDataError): - hdr.set_slice_times(times) + times = [0] * 6 + pytest.raises(HeaderDataError, hdr.set_slice_times, times) + times = [None] * 7 + pytest.raises(HeaderDataError, hdr.set_slice_times, times) + times = [None, 0, 1, None, 3, 4, None] + pytest.raises(HeaderDataError, hdr.set_slice_times, times) + times = [None, 0, 1, 2.1, 3, 4, None] + pytest.raises(HeaderDataError, hdr.set_slice_times, times) + times = [None, 0, 4, 3, 2, 1, None] + pytest.raises(HeaderDataError, hdr.set_slice_times, times) times = [0, 1, 2, 3, 4, 5, 6] hdr.set_slice_times(times) assert hdr['slice_code'] == 1 @@ -711,8 +713,7 @@ def test_recoded_fields(self): assert hdr.get_value_label('intent_code') == 't test' assert hdr.get_value_label('slice_code') == 'unknown' hdr['slice_code'] = 4 # alternating decreasing - assert (hdr.get_value_label('slice_code') == - 'alternating decreasing') + assert hdr.get_value_label('slice_code') == 'alternating decreasing' def unshear_44(affine): @@ -999,7 +1000,7 @@ def test_load_save(self): assert_array_equal(img3.get_fdata(), data) assert img3.header == img.header assert isinstance(np.asanyarray(img3.dataobj), - np.memmap if ext == '' else np.ndarray) + np.memmap if ext == '' else np.ndarray) # del to avoid windows errors of form 'The process cannot # access the file because it is being used' del img3 diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index d0daf9632e..ca6e7d8125 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -49,16 +49,14 @@ def test_eol_check(self): hdr['eol_check'] = 0 fhdr, message, raiser = self.log_chk(hdr, 20) assert_array_equal(fhdr['eol_check'], good_eol) - assert (message == - 'EOL check all 0; ' - 'setting EOL check to 13, 10, 26, 10') + assert message == 'EOL check all 0; setting EOL check to 13, 10, 26, 10' hdr['eol_check'] = (13, 10, 0, 10) fhdr, message, raiser = self.log_chk(hdr, 40) assert_array_equal(fhdr['eol_check'], good_eol) assert (message == - 'EOL check not 0 or 13, 10, 26, 10; ' - 'data may be corrupted by EOL conversion; ' - 'setting EOL check to 13, 10, 26, 10') + 'EOL check not 0 or 13, 10, 26, 10; ' + 'data may be corrupted by EOL conversion; ' + 'setting EOL check to 13, 10, 26, 10') class TestNifti2PairHeader(_Nifti2Mixin, TestNifti1PairHeader): diff --git a/nibabel/tests/test_optpkg.py b/nibabel/tests/test_optpkg.py index 1e652a51c5..925180ce6b 100644 --- a/nibabel/tests/test_optpkg.py +++ b/nibabel/tests/test_optpkg.py @@ -26,7 +26,7 @@ def assert_bad(pkg_name, min_version=None): assert not have_pkg assert isinstance(pkg, TripWire) with pytest.raises(TripWireError): - getattr(pkg, 'a_method') + pkg.a_method with pytest.raises(SkipTest): setup() @@ -77,7 +77,6 @@ def test_versions(): try: pkg.some_method except TripWireError as err: - assert (str(err) == - 'These functions need _a_fake_package version >= 3.0') + assert str(err) == 'These functions need _a_fake_package version >= 3.0' finally: del sys.modules[fake_name] diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 226feee526..a3ad215488 100644 --- a/nibabel/tests/test_orientations.py +++ 
b/nibabel/tests/test_orientations.py @@ -267,25 +267,25 @@ def test_ornt2axcodes(): # Recoding orientation to axis codes labels = (('left', 'right'), ('back', 'front'), ('down', 'up')) assert ornt2axcodes([[0, 1], - [1, 1], - [2, 1]], labels) == ('right', 'front', 'up') + [1, 1], + [2, 1]], labels) == ('right', 'front', 'up') assert ornt2axcodes([[0, -1], - [1, -1], - [2, -1]], labels) == ('left', 'back', 'down') + [1, -1], + [2, -1]], labels) == ('left', 'back', 'down') assert ornt2axcodes([[2, -1], - [1, -1], - [0, -1]], labels) == ('down', 'back', 'left') + [1, -1], + [0, -1]], labels) == ('down', 'back', 'left') assert ornt2axcodes([[1, 1], - [2, -1], - [0, 1]], labels) == ('front', 'down', 'right') + [2, -1], + [0, 1]], labels) == ('front', 'down', 'right') # default is RAS output directions assert ornt2axcodes([[0, 1], - [1, 1], - [2, 1]]) == ('R', 'A', 'S') + [1, 1], + [2, 1]]) == ('R', 'A', 'S') # dropped axes produce None assert ornt2axcodes([[0, 1], - [np.nan, np.nan], - [2, 1]]) == ('R', None, 'S') + [np.nan, np.nan], + [2, 1]]) == ('R', None, 'S') # Non integer axes raises error with pytest.raises(ValueError): ornt2axcodes([[0.1, 1]]) @@ -365,10 +365,8 @@ def test_axcodes2ornt(): def test_aff2axcodes(): assert aff2axcodes(np.eye(4)) == tuple('RAS') aff = [[0, 1, 0, 10], [-1, 0, 0, 20], [0, 0, 1, 30], [0, 0, 0, 1]] - assert (aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == - ('B', 'R', 'U')) - assert (aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == - ('B', 'R', 'U')) + assert aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == ('B', 'R', 'U') + assert aff2axcodes(aff, (('L', 'R'), ('B', 'F'), ('D', 'U'))) == ('B', 'R', 'U') def test_inv_ornt_aff(): diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 4c55179672..d39f2a097f 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -345,8 +345,7 @@ def test_sorting_multiple_echos_and_contrasts(): np.arange(1, nslices+1)) current_echo = slice_offset % nechos + 1 # same echo for each slice in the group - assert (np.all(sorted_echos[istart:iend] == current_echo) == - True) + assert np.all(sorted_echos[istart:iend] == current_echo) # outermost sort index is image_type_mr assert np.all(sorted_types[:ntotal//4] == 0) assert np.all(sorted_types[ntotal//4:ntotal//2] == 1) @@ -403,8 +402,7 @@ def test_sorting_multiecho_ASL(): # check volume labels vol_labels = asl_hdr.get_volume_labels() - assert (list(vol_labels.keys()) == - ['echo number', 'label type', 'dynamic scan number']) + assert list(vol_labels.keys()) == ['echo number', 'label type', 'dynamic scan number'] assert_array_equal(vol_labels['dynamic scan number'], [1]*6 + [2]*6) assert_array_equal(vol_labels['label type'], [1]*3 + [2]*3 + [1]*3 + [2]*3) assert_array_equal(vol_labels['echo number'], [1, 2, 3]*4) diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index 1dd64c6a5c..1e9e94091e 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -287,17 +287,11 @@ def test_resample_to_output(): img_ni1 = Nifti2Image(data, np.eye(4)) img_ni2 = Nifti2Image(data, np.eye(4)) # Default is Nifti1Image - assert ( - resample_to_output(img_ni2).__class__ == - Nifti1Image) + assert resample_to_output(img_ni2).__class__ == Nifti1Image # Can be overriden - assert ( - resample_to_output(img_ni1, out_class=Nifti2Image).__class__ == - Nifti2Image) + assert resample_to_output(img_ni1, out_class=Nifti2Image).__class__ == Nifti2Image # None specifies out_class from input 
- assert ( - resample_to_output(img_ni2, out_class=None).__class__ == - Nifti2Image) + assert resample_to_output(img_ni2, out_class=None).__class__ == Nifti2Image @needs_scipy @@ -347,17 +341,11 @@ def test_smooth_image(): img_ni1 = Nifti2Image(data, np.eye(4)) img_ni2 = Nifti2Image(data, np.eye(4)) # Default is Nifti1Image - assert ( - smooth_image(img_ni2, 0).__class__ == - Nifti1Image) + assert smooth_image(img_ni2, 0).__class__ == Nifti1Image # Can be overriden - assert ( - smooth_image(img_ni1, 0, out_class=Nifti2Image).__class__ == - Nifti2Image) + assert smooth_image(img_ni1, 0, out_class=Nifti2Image).__class__ == Nifti2Image # None specifies out_class from input - assert ( - smooth_image(img_ni2, 0, out_class=None).__class__ == - Nifti2Image) + assert smooth_image(img_ni2, 0, out_class=None).__class__ == Nifti2Image @needs_scipy diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index a5a9e9e051..cccd7b729f 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -104,7 +104,7 @@ def validate_shape(self, pmaker, params): assert_array_equal(prox.shape, params['shape']) # Read only with pytest.raises(AttributeError): - setattr(prox, 'shape', params['shape']) + prox.shape = params['shape'] def validate_ndim(self, pmaker, params): # Check shape @@ -112,7 +112,7 @@ def validate_ndim(self, pmaker, params): assert prox.ndim == len(params['shape']) # Read only with pytest.raises(AttributeError): - setattr(prox, 'ndim', len(params['shape'])) + prox.ndim = len(params['shape']) def validate_is_proxy(self, pmaker, params): # Check shape @@ -122,7 +122,7 @@ def validate_is_proxy(self, pmaker, params): assert not is_proxy(np.arange(10)) # Read only with pytest.raises(AttributeError): - setattr(prox, 'is_proxy', False) + prox.is_proxy = False def validate_asarray(self, pmaker, params): # Check proxy returns expected array from asarray @@ -311,7 +311,7 @@ def validate_dtype(self, pmaker, params): prox, fio, hdr = pmaker() assert_dt_equal(prox.dtype, params['dtype']) with pytest.raises(AttributeError): - prox.__setattr__('dtype', np.dtype(prox.dtype)) + prox.dtype = np.dtype(prox.dtype) def validate_slope_inter_offset(self, pmaker, params): # Check slope, inter, offset diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index e916770dab..5855d0837a 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -131,15 +131,15 @@ def test_a2f_nan2zero(): assert_array_equal(data_back, [np.array(np.nan).astype(np.int32), 99]) -@pytest.mark.parametrize("in_type, out_type, err", [ - (np.int16, np.int16, None), - (np.int16, np.int8, None), - (np.uint16, np.uint8, None), - (np.int32, np.int8, None), - (np.float32, np.uint8, None), - (np.float32, np.int16, None) +@pytest.mark.parametrize("in_type, out_type", [ + (np.int16, np.int16), + (np.int16, np.int8), + (np.uint16, np.uint8), + (np.int32, np.int8), + (np.float32, np.uint8), + (np.float32, np.int16) ]) -def test_array_file_scales(in_type, out_type, err): +def test_array_file_scales(in_type, out_type): # Test scaling works for max, min when going from larger to smaller type, # and from float to integer. 
bio = BytesIO() @@ -147,10 +147,6 @@ def test_array_file_scales(in_type, out_type, err): arr = np.zeros((3,), dtype=in_type) info = type_info(in_type) arr[0], arr[1] = info['min'], info['max'] - if not err is None: - with pytest.raises(err): - _calculate_scale(arr, out_dtype, True) - return slope, inter, mn, mx = _calculate_scale(arr, out_dtype, True) array_to_file(arr, bio, out_type, 0, inter, slope, mn, mx) bio.seek(0) @@ -225,5 +221,4 @@ def check_int_a2f(in_type, out_type): # Clip at extremes to remove inf info = type_info(in_type) out_min, out_max = info['min'], info['max'] - assert np.allclose(big_floater(data), - big_floater(np.clip(data_back, out_min, out_max))) + assert np.allclose(big_floater(data), big_floater(np.clip(data_back, out_min, out_max))) diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 1ed6870f07..d15403a881 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -412,8 +412,7 @@ def test_parrec2nii_with_data(): for line in csvreader: nlines += 1 - assert sorted(csv_keys) == ['diffusion b value number', - 'gradient orientation number'] + assert sorted(csv_keys) == ['diffusion b value number', 'gradient orientation number'] assert nlines == 8 # 8 volumes present in DTI.PAR diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index a2a8ddc79d..58b41d5822 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -435,8 +435,7 @@ def test_slicer(self): sliceobj = [slice(None, None, 2)] * 3 + \ [slice(None)] * (len(dshape) - 3) downsampled_img = img.slicer[tuple(sliceobj)] - assert (downsampled_img.header.get_zooms()[:3] - == np.array(spatial_zooms) * 2).all() + assert (downsampled_img.header.get_zooms()[:3] == np.array(spatial_zooms) * 2).all() max4d = (hasattr(img.header, '_structarr') and 'dims' in img.header._structarr.dtype.fields and diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index 32b77fac6c..b27775a81a 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -165,9 +165,8 @@ def test_assert_re_in_exception(args): def test_test_data(): assert test_data() == data_path - assert (test_data() == - os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', 'tests', 'data'))) + assert test_data() == os.path.abspath(os.path.join(os.path.dirname(__file__), + '..', 'tests', 'data')) for subdir in ('nicom', 'gifti', 'externals'): assert test_data(subdir) == os.path.join(data_path[:-10], subdir, 'tests', 'data') assert os.path.exists(test_data(subdir)) diff --git a/nibabel/tests/test_tripwire.py b/nibabel/tests/test_tripwire.py index b69e913d4b..2ec3e06182 100644 --- a/nibabel/tests/test_tripwire.py +++ b/nibabel/tests/test_tripwire.py @@ -14,7 +14,7 @@ def test_tripwire(): # Test tripwire object silly_module_name = TripWire('We do not have silly_module_name') with pytest.raises(TripWireError): - getattr(silly_module_name, 'do_silly_thing') + silly_module_name.do_silly_thing # Check AttributeError can be checked too try: silly_module_name.__wrapped__ diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index d56873d414..26e04dd8f9 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -165,7 +165,7 @@ def test_to_from_fileobj(self): def test_mappingness(self): hdr = self.header_class() with pytest.raises(ValueError): - hdr.__setitem__('nonexistent key', 0.1) + hdr['nonexistent key'] = 0.1 hdr_dt = hdr.structarr.dtype keys = 
hdr.keys()
         assert keys == list(hdr)
@@ -201,7 +201,7 @@ def test_endianness_ro(self):
         '''
         hdr = self.header_class()
         with pytest.raises(AttributeError):
-            hdr.__setattr__('endianness', '<')
+            hdr.endianness = '<'
 
     def test_endian_guess(self):
         # Check guesses of endian
@@ -231,7 +231,7 @@ def test_structarr(self):
         hdr.structarr
         # That it's read only
         with pytest.raises(AttributeError):
-            hdr.__setattr__('structarr', 0)
+            hdr.structarr = 0
 
     def log_chk(self, hdr, level):
         return log_chk(hdr, level)
@@ -347,8 +347,7 @@ class MyHdr(self.header_class):
             # Speculating that we can set code value 0 or 1
             new_code = 1 if code == 0 else 0
             hdr[key] = new_code
-            assert (hdr.get_value_label(key) ==
-                    '<unknown code {0}>'.format(new_code))
+            assert hdr.get_value_label(key) == '<unknown code {0}>'.format(new_code)
 
 
 class MyWrapStruct(WrapStruct):
@@ -474,15 +473,13 @@ def test_log_checks(self):
             fhdr, message, raiser = self.log_chk(hdr, 40)
             return
         assert fhdr['an_integer'] == 1
-        assert (message ==
-                'an_integer should be 1; set an_integer to 1')
+        assert message == 'an_integer should be 1; set an_integer to 1'
         pytest.raises(*raiser)
         # lower case string
         hdr = HC()
         hdr['a_str'] = 'Hello'  # severity = 20
         fhdr, message, raiser = self.log_chk(hdr, 20)
-        assert (message == 'a_str should be lower case; '
-                'set a_str to lower case')
+        assert message == 'a_str should be lower case; set a_str to lower case'
         pytest.raises(*raiser)
 
     def test_logger_error(self):
@@ -502,9 +499,7 @@ def test_logger_error(self):
             # Check log message appears in new logger
             imageglobals.logger = logger
             hdr.copy().check_fix()
-            assert (str_io.getvalue() ==
-                    'a_str should be lower case; '
-                    'set a_str to lower case\n')
+            assert str_io.getvalue() == 'a_str should be lower case; set a_str to lower case\n'
             # Check that error_level in fact causes error to be raised
             imageglobals.error_level = 20
             with pytest.raises(HeaderDataError):

From a7f8e11e84d222c84afa89ebf32099f2a4ed04cb Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Feb 2020 23:30:45 -0500
Subject: [PATCH 670/689] TEST: Bad test syntax

---
 nibabel/tests/test_nifti1.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index 75f261c48e..9b4747bd5d 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -451,7 +451,7 @@ def test_datatypes(self):
             if dt == np.void:
                 continue
             hdr.set_data_dtype(code)
-            assert hdr.get_data_dtype(), data_type_codes.dtype[code]
+            assert hdr.get_data_dtype() == data_type_codes.dtype[code]
         # Check that checks also see new datatypes
         hdr.set_data_dtype(np.complex128)
         hdr.check_fix()

From 60616510758961c3b03858da84b75b0ee5b94e0a Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Feb 2020 23:31:30 -0500
Subject: [PATCH 671/689] TEST/RF: Mock compress_ext_map, instead of fixture

---
 nibabel/tests/test_openers.py | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py
index 43712d11bf..85a8f4a0a7 100644
--- a/nibabel/tests/test_openers.py
+++ b/nibabel/tests/test_openers.py
@@ -18,6 +18,7 @@
 from ..tmpdirs import InTemporaryDirectory
 from ..volumeutils import BinOpener
 
+import unittest
 from unittest import mock
 import pytest
 from ..testing import error_warnings
@@ -160,19 +161,8 @@ def test_Opener_gzip_type():
         with patch_indexed_gzip(igzip_present):
             assert isinstance(Opener(fname, **kwargs).fobj, expected)
 
-@pytest.fixture(scope="class")
-def image_opener_setup(request):
-    compress_ext_map = 
ImageOpener.compress_ext_map.copy() - request.cls.compress_ext_map = compress_ext_map - - def teardown(): - ImageOpener.compress_ext_map = request.cls.compress_ext_map - request.addfinalizer(teardown) - - -@pytest.mark.usefixtures("image_opener_setup") -class TestImageOpener: +class TestImageOpener(unittest.TestCase): def test_vanilla(self): # Test that ImageOpener does add '.mgz' as gzipped file type with InTemporaryDirectory(): @@ -181,6 +171,7 @@ def test_vanilla(self): with ImageOpener('test.mgz', 'w') as fobj: assert hasattr(fobj.fobj, 'compress') + @mock.patch.dict('nibabel.openers.ImageOpener.compress_ext_map') def test_new_association(self): def file_opener(fileish, mode): return open(fileish, mode) From a98b023177bf94eaee791aa312e6cd8c57776d5f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 19 Feb 2020 23:32:18 -0500 Subject: [PATCH 672/689] TEST: Rename tests to ensure running --- nibabel/tests/test_recoder.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nibabel/tests/test_recoder.py b/nibabel/tests/test_recoder.py index 8e4edd1b87..d6206df978 100644 --- a/nibabel/tests/test_recoder.py +++ b/nibabel/tests/test_recoder.py @@ -49,7 +49,7 @@ def test_recoder_3(): with pytest.raises(AttributeError): rc.label -def test_recoder_3(): +def test_recoder_4(): # with explicit column names codes = ((1, 'one'), (2, 'two')) rc = Recoder(codes, ['code1', 'label']) @@ -61,7 +61,7 @@ def test_recoder_3(): assert rc.label['one'] == 'one' -def test_recoder_4(): +def test_recoder_5(): # code, label, aliases codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes) # just with implicit alias @@ -70,7 +70,7 @@ def test_recoder_4(): assert rc.code['first'] == 1 -def test_recoder_5(): +def test_recoder_6(): # with explicit column names codes = ((1, 'one', '1', 'first'), (2, 'two')) rc = Recoder(codes, ['code1', 'label']) From 5657d01669fa3b9662dd5e51044e53ae96509cd8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 19 Feb 2020 23:33:38 -0500 Subject: [PATCH 673/689] TEST/RF: Reparametrize tests --- nibabel/tests/test_scaling.py | 38 +++++++++++++++-------------------- nibabel/tests/test_testing.py | 25 +++++++++-------------- 2 files changed, 25 insertions(+), 38 deletions(-) diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index 5855d0837a..f314e6b572 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -157,31 +157,25 @@ def test_array_file_scales(in_type, out_type): assert np.all(np.abs(arr - arr3) <= max_miss) -@pytest.mark.parametrize("in_type, out_type",[ - ('int', 'int'), - ('uint', 'int'), -]) -def test_scaling_in_abstract(in_type, out_type): +@pytest.mark.parametrize("category0, category1, overflow",[ # Confirm that, for all ints and uints as input, and all possible outputs, # for any simple way of doing the calculation, the result is near enough - for in_tp in np.sctypes[in_type]: - for out_tp in np.sctypes[out_type]: - check_int_a2f(in_tp, out_tp) - - -@pytest.mark.parametrize("in_type, out_type", [ - ('float', 'int'), - ('float', 'uint'), - ('complex', 'int'), - ('complex', 'uint'), -]) -def test_scaling_in_abstract_warn(in_type, out_type): - + ('int', 'int', False), + ('uint', 'int', False), # Converting floats to integer - for in_tp in np.sctypes[in_type]: - for out_tp in np.sctypes[out_type]: - with suppress_warnings(): # overflow - check_int_a2f(in_tp, out_tp) + ('float', 'int', True), + ('float', 'uint', True), + ('complex', 'int', True), + ('complex', 'uint', True), +]) +def 
test_scaling_in_abstract(category0, category1, overflow): + for in_type in np.sctypes[category0]: + for out_type in np.sctypes[category1]: + if overflow: + with suppress_warnings(): + check_int_a2f(in_type, out_type) + else: + check_int_a2f(in_type, out_type) def check_int_a2f(in_type, out_type): diff --git a/nibabel/tests/test_testing.py b/nibabel/tests/test_testing.py index b27775a81a..bbe83c6973 100644 --- a/nibabel/tests/test_testing.py +++ b/nibabel/tests/test_testing.py @@ -136,31 +136,24 @@ def f(): with pytest.raises(ValueError): f() -@pytest.mark.parametrize("args", [ +@pytest.mark.parametrize("regex, entries", [ [".*", ""], [".*", ["any"]], ["ab", "abc"], # Sufficient to have one entry matching ["ab", ["", "abc", "laskdjf"]], # Tuples should be ok too - ["ab", ("", "abc", "laskdjf")] -]) -def test_assert_re_in(args): - assert_re_in(*args) - - -@pytest.mark.parametrize("args", [ + ["ab", ("", "abc", "laskdjf")], # Should do match not search - ["ab", "cab"], - ["ab$", "abc"], - ["ab$", ["ddd", ""]], - ["ab$", ("ddd", "")], + pytest.param("ab", "cab", marks=pytest.mark.xfail), + pytest.param("ab$", "abc", marks=pytest.mark.xfail), + pytest.param("ab$", ["ddd", ""], marks=pytest.mark.xfail), + pytest.param("ab$", ("ddd", ""), marks=pytest.mark.xfail), #Shouldn't "match" the empty list - ["", []] + pytest.param("", [], marks=pytest.mark.xfail) ]) -def test_assert_re_in_exception(args): - with pytest.raises(AssertionError): - assert_re_in(*args) +def test_assert_re_in(regex, entries): + assert_re_in(regex, entries) def test_test_data(): From aec083752813069cb024b7058268c8a712da7ef7 Mon Sep 17 00:00:00 2001 From: Zvi Baratz Date: Thu, 20 Feb 2020 17:29:16 +0200 Subject: [PATCH 674/689] Typo fix imagqes --> images --- doc/source/dicom/dicom_mosaic.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/dicom/dicom_mosaic.rst b/doc/source/dicom/dicom_mosaic.rst index cf597169d3..7e5a157a94 100644 --- a/doc/source/dicom/dicom_mosaic.rst +++ b/doc/source/dicom/dicom_mosaic.rst @@ -67,7 +67,7 @@ The first two values of $\mathbf{s}$ ($s_1, s_2$) are given by the ``PixelSpacing`` field. We get $s_3$ (the slice scaling value) from ``SpacingBetweenSlices``. -The :ref:`spm-dicom` code has a comment saying that mosaic DICOM imagqes +The :ref:`spm-dicom` code has a comment saying that mosaic DICOM images have an incorrect ``ImagePositionPatient`` field. The ``ImagePositionPatient`` field usually gives the $\mathbf{t}$ vector. The comments imply that Siemens has derived ``ImagePositionPatient`` From 0ecbf8d769b7bcefdd281930322cf3aac75ec275 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 4 Mar 2020 22:08:43 -0500 Subject: [PATCH 675/689] CI: Fail test if docs do not build --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 622a19db32..b01dd09709 100644 --- a/.travis.yml +++ b/.travis.yml @@ -121,8 +121,7 @@ script: flake8 nibabel elif [ "${CHECK_TYPE}" == "doc" ]; then cd doc - make html; - make doctest; + make html && make doctest elif [ "${CHECK_TYPE}" == "test" ]; then # Change into an innocuous directory and find tests from installation mkdir for_testing From cc20e28c3214b0db92de44060256f5c93bcafbe1 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 4 Mar 2020 22:25:29 -0500 Subject: [PATCH 676/689] PY3: Drop Python 2 guards, use runpy instead of exec --- doc/source/conf.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 8225f67e8a..d3e75237ab 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -21,10 +21,8 @@ import sys import os -try: - from configparser import ConfigParser -except ImportError: - from ConfigParser import ConfigParser # PY2 +from runpy import run_path +from configparser import ConfigParser # Check for external Sphinx extensions we depend on try: @@ -51,9 +49,7 @@ # -- General configuration ---------------------------------------------------- # We load the nibabel release info into a dict by explicit execution -rel = {} -with open(os.path.join('..', '..', 'nibabel', 'info.py'), 'r') as fobj: - exec(fobj.read(), rel) +rel = run_path(os.path.join('..', '..', 'nibabel', 'info.py')) # Write long description from info with open('_long_description.inc', 'wt') as fobj: @@ -62,10 +58,7 @@ # Load metadata from setup.cfg config = ConfigParser() config.read(os.path.join('..', '..', 'setup.cfg')) -try: - metadata = config['metadata'] -except AttributeError: - metadata = dict(config.items('metadata')) # PY2 +metadata = config['metadata'] # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. From 03c6c31f52068465e21981b885983ee4b5385ec2 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 4 Mar 2020 22:25:36 -0500 Subject: [PATCH 677/689] DOC: Attempt to find versioneer version when building docs --- doc/tools/build_modref_templates.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 3b988a2135..c3d08ef0b4 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -5,6 +5,7 @@ # stdlib imports import sys import re +import os from os.path import join as pjoin # local imports @@ -48,12 +49,25 @@ def abort(error): installed_version = V(module.__version__) - info_file = pjoin('..', package, 'info.py') - info_lines = open(info_file).readlines() - source_version = '.'.join([v.split('=')[1].strip(" '\n.") - for v in info_lines if re.match( - '^_version_(major|minor|micro|extra)', v - )]) + version_file = pjoin('..', package, '_version.py') + source_version = None + if os.path.exists(version_file): + # Versioneer + from runpy import run_path + try: + source_version = run_path(version_file)['get_versions']()['version'] + except (FileNotFoundError, KeyError): + pass + if source_version == '0+unknown': + source_version = None + if source_version is None: + # Legacy fall-back + info_file = pjoin('..', package, 'info.py') + info_lines = open(info_file).readlines() + source_version = '.'.join([v.split('=')[1].strip(" '\n.") + for v in info_lines if re.match( + '^_version_(major|minor|micro|extra)', v + )]) print('***', source_version) if source_version != installed_version: From 92dc3f1299c56b9de44aa5375e2e816828c158b0 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 5 Mar 2020 08:16:57 -0500 Subject: [PATCH 678/689] DOC: Skip py3k --- doc/tools/build_modref_templates.py | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index c3d08ef0b4..da752b6c42 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -82,6 +82,7 @@ def abort(error): r'.*test.*$', r'\.info.*$', r'\.pkg_info.*$', + r'\.py3k.*$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'index', relative_to=outdir) From b57cf1bbeec3f041c55b579190aa57289ac90445 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 5 Mar 2020 08:18:17 -0500 Subject: [PATCH 679/689] DOC: Fix "``s" pattern in changelog --- Changelog | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Changelog b/Changelog index c79a23f895..1209447219 100644 --- a/Changelog +++ b/Changelog @@ -79,7 +79,7 @@ Enhancements Bug fixes --------- -* Sliced ``Tractogram``s no longer ``apply_affine`` to the original +* Sliced ``Tractogram``\s no longer ``apply_affine`` to the original ``Tractogram``'s streamlines. (pr/811) (MC, reviewed by Serge Koudoro, Philippe Poulin, CM, MB) * Change strings with invalid escapes to raw strings (pr/827) (EL, reviewed @@ -98,7 +98,7 @@ Maintenance API changes and deprecations ---------------------------- * Fully remove deprecated ``checkwarns`` and ``minc`` modules. (pr/852) (CM) -* The ``keep_file_open`` argument to file load operations and ``ArrayProxy``s +* The ``keep_file_open`` argument to file load operations and ``ArrayProxy``\s no longer acccepts the value ``"auto"``, raising a ``ValueError``. (pr/852) (CM) * Deprecate ``ArraySequence.data`` in favor of ``ArraySequence.get_data()``, @@ -420,7 +420,7 @@ New features * Support for MRtrix TCK streamlines file format (pr/486) (MC, reviewed by MB, Arnaud Bore, J-Donald Tournier, Jean-Christophe Houde) * Added ``get_fdata()`` as default method to retrieve scaled floating point - data from ``DataobjImage``s (pr/551) (MB, reviewed by CM, SG) + data from ``DataobjImage``\s (pr/551) (MB, reviewed by CM, SG) Enhancements ------------ From 4ac98210d0ff3887746324dfbab8360103aec250 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Thu, 5 Mar 2020 08:19:08 -0500 Subject: [PATCH 680/689] DOC: Address warnings and improve formatting --- nibabel/affines.py | 2 +- nibabel/brikhead.py | 5 +++-- nibabel/cifti2/__init__.py | 2 +- nibabel/cifti2/cifti2.py | 6 +++--- nibabel/cifti2/cifti2_axes.py | 4 ++-- nibabel/gifti/gifti.py | 15 +++++++++------ nibabel/nifti1.py | 24 ++++++++++++------------ nibabel/streamlines/tck.py | 10 ++++------ nibabel/streamlines/tractogram.py | 16 ++++++++-------- 9 files changed, 43 insertions(+), 41 deletions(-) diff --git a/nibabel/affines.py b/nibabel/affines.py index 9a37cc9e49..c2b2a3b1d0 100644 --- a/nibabel/affines.py +++ b/nibabel/affines.py @@ -306,7 +306,7 @@ def obliquity(affine): This implementation is inspired by `AFNI's implementation `_. For further details about *obliquity*, check `AFNI's documentation - _. + `_. Parameters ---------- diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 41bfc54c4d..13bb999f2d 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -250,7 +250,7 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None): a new file handle is created every time the image is accessed. If ``file_like`` refers to an open file handle, this setting has no effect. 
The default value (``None``) will result in the value of - ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. + ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. """ super(AFNIArrayProxy, self).__init__(file_like, header, @@ -533,7 +533,7 @@ def from_file_map(klass, file_map, mmap=True, keep_file_open=None): a new file handle is created every time the image is accessed. If ``file_like`` refers to an open file handle, this setting has no effect. The default value (``None``) will result in the value of - ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used. + ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used. """ with file_map['header'].get_prepare_fileobj('rt') as hdr_fobj: hdr = klass.header_class.from_fileobj(hdr_fobj) @@ -553,6 +553,7 @@ def filespec_to_file_map(klass, filespec): afni.nimh.nih.gov/pub/dist/doc/program_help/README.compression.html. Thus, if you have AFNI files my_image.HEAD and my_image.BRIK.gz and you want to load the AFNI BRIK / HEAD pair, you can specify: + * The HEAD filename - e.g., my_image.HEAD * The BRIK filename w/o compressed extension - e.g., my_image.BRIK * The full BRIK filename - e.g., my_image.BRIK.gz diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py index 9dc6dd68b8..c0933c9041 100644 --- a/nibabel/cifti2/__init__.py +++ b/nibabel/cifti2/__init__.py @@ -26,4 +26,4 @@ Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ, Cifti2Vertices, Cifti2Volume, CIFTI_BRAIN_STRUCTURES, Cifti2HeaderError, CIFTI_MODEL_TYPES, load, save) -from .cifti2_axes import (Axis, BrainModelAxis, ParcelsAxis, SeriesAxis, LabelAxis, ScalarAxis) \ No newline at end of file +from .cifti2_axes import (Axis, BrainModelAxis, ParcelsAxis, SeriesAxis, LabelAxis, ScalarAxis) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index 1a5307eba5..9bac6a0e5b 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -172,7 +172,7 @@ def _to_xml_element(self): class Cifti2LabelTable(xml.XmlSerializable, MutableMapping): - """ CIFTI-2 label table: a sequence of ``Cifti2Label``s + """ CIFTI-2 label table: a sequence of ``Cifti2Label``\s * Description - Used by NamedMap when IndicesMapToDataType is "CIFTI_INDEX_TYPE_LABELS" in order to associate names and display colors @@ -927,8 +927,8 @@ class Cifti2MatrixIndicesMap(xml.XmlSerializable, MutableSequence): * Text Content: [NA] * Parent Element - Matrix - Attribute - --------- + Attributes + ---------- applies_to_matrix_dimension : list of ints Dimensions of this matrix that follow this mapping indices_map_to_data_type : str one of CIFTI_MAP_TYPES diff --git a/nibabel/cifti2/cifti2_axes.py b/nibabel/cifti2/cifti2_axes.py index 05ab84e6ab..c4c47007db 100644 --- a/nibabel/cifti2/cifti2_axes.py +++ b/nibabel/cifti2/cifti2_axes.py @@ -23,7 +23,7 @@ (except for SeriesAxis objects, which have to remain monotonically increasing or decreasing). Creating new CIFTI-2 axes ------------------------ +------------------------- New Axis objects can be constructed by providing a description for what is contained in each row/column of the described tensor. 
For each Axis sub-class this descriptor is: @@ -250,7 +250,7 @@ def __init__(self, name, voxel=None, vertex=None, affine=None, factory methods: - :py:meth:`~BrainModelAxis.from_mask`: creates surface or volumetric BrainModelAxis axis - from respectively 1D or 3D masks + from respectively 1D or 3D masks - :py:meth:`~BrainModelAxis.from_surface`: creates a surface BrainModelAxis axis The resulting BrainModelAxis axes can be concatenated by adding them together. diff --git a/nibabel/gifti/gifti.py b/nibabel/gifti/gifti.py index b423ec48dd..0497556a2d 100644 --- a/nibabel/gifti/gifti.py +++ b/nibabel/gifti/gifti.py @@ -207,18 +207,21 @@ class GiftiCoordSystem(xml.XmlSerializable): Attributes ---------- dataspace : int - From the spec: "Contains the stereotaxic space of a DataArray's data + From the spec: Contains the stereotaxic space of a DataArray's data prior to application of the transformation matrix. The stereotaxic space should be one of: - NIFTI_XFORM_UNKNOWN - NIFTI_XFORM_SCANNER_ANAT - NIFTI_XFORM_ALIGNED_ANAT - NIFTI_XFORM_TALAIRACH - NIFTI_XFORM_MNI_152" + + - NIFTI_XFORM_UNKNOWN + - NIFTI_XFORM_SCANNER_ANAT + - NIFTI_XFORM_ALIGNED_ANAT + - NIFTI_XFORM_TALAIRACH + - NIFTI_XFORM_MNI_152 + xformspace : int Spec: "Contains the stereotaxic space of a DataArray's data after application of the transformation matrix. See the DataSpace element for a list of stereotaxic spaces." + xform : array-like shape (4, 4) Affine transformation matrix """ diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index 3979f5b96c..352837f86e 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -1775,18 +1775,18 @@ def __init__(self, dataobj, affine, header=None, self._affine2header() # Copy docstring __init__.__doc__ = analyze.AnalyzeImage.__init__.__doc__ + ''' - Notes - ----- - - If both a `header` and an `affine` are specified, and the `affine` does - not match the affine that is in the `header`, the `affine` will be used, - but the ``sform_code`` and ``qform_code`` fields in the header will be - re-initialised to their default values. This is performed on the basis - that, if you are changing the affine, you are likely to be changing the - space to which the affine is pointing. The :meth:`set_sform` and - :meth:`set_qform` methods can be used to update the codes after an image - has been created - see those methods, and the :ref:`manual - ` for more details. ''' + Notes + ----- + + If both a `header` and an `affine` are specified, and the `affine` does + not match the affine that is in the `header`, the `affine` will be used, + but the ``sform_code`` and ``qform_code`` fields in the header will be + re-initialised to their default values. This is performed on the basis + that, if you are changing the affine, you are likely to be changing the + space to which the affine is pointing. The :meth:`set_sform` and + :meth:`set_qform` methods can be used to update the codes after an image + has been created - see those methods, and the :ref:`manual + ` for more details. ''' def update_header(self): ''' Harmonize header with image data and affine diff --git a/nibabel/streamlines/tck.py b/nibabel/streamlines/tck.py index ffcd2e437a..5decf9e831 100644 --- a/nibabel/streamlines/tck.py +++ b/nibabel/streamlines/tck.py @@ -30,9 +30,9 @@ class TckFile(TractogramFile): ----- MRtrix (so its file format: TCK) considers streamlines coordinates to be in world space (RAS+ and mm space). MRtrix refers to that space - as the "real" or "scanner" space [1]_. + as the "real" or "scanner" space [#]_. 
- Moreover, when streamlines are mapped back to voxel space [2]_, a + Moreover, when streamlines are mapped back to voxel space [#]_, a streamline point located at an integer coordinate (i,j,k) is considered to be at the center of the corresponding voxel. This is in contrast with TRK's internal convention where it would have referred to a corner. @@ -40,10 +40,8 @@ class TckFile(TractogramFile): NiBabel's streamlines internal representation follows the same convention as MRtrix. - References - ---------- - [1] http://www.nitrc.org/pipermail/mrtrix-discussion/2014-January/000859.html - [2] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space + .. [#] http://www.nitrc.org/pipermail/mrtrix-discussion/2014-January/000859.html + .. [#] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space """ # Constants MAGIC_NUMBER = "mrtrix tracks" diff --git a/nibabel/streamlines/tractogram.py b/nibabel/streamlines/tractogram.py index 3d01d8426e..e8ecbac4ff 100644 --- a/nibabel/streamlines/tractogram.py +++ b/nibabel/streamlines/tractogram.py @@ -263,9 +263,9 @@ class Tractogram(object): choice as long as you provide the correct `affine_to_rasmm` matrix, at construction time. When applied to streamlines coordinates, that transformation matrix should bring the streamlines back to world space - (RAS+ and mm space) [1]_. + (RAS+ and mm space) [#]_. - Moreover, when streamlines are mapped back to voxel space [2]_, a + Moreover, when streamlines are mapped back to voxel space [#]_, a streamline point located at an integer coordinate (i,j,k) is considered to be at the center of the corresponding voxel. This is in contrast with other conventions where it might have referred to a corner. @@ -292,8 +292,8 @@ class Tractogram(object): References ---------- - [1] http://nipy.org/nibabel/coordinate_systems.html#naming-reference-spaces - [2] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space + .. [#] http://nipy.org/nibabel/coordinate_systems.html#naming-reference-spaces + .. [#] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space """ def __init__(self, streamlines=None, data_per_streamline=None, @@ -515,9 +515,9 @@ class LazyTractogram(Tractogram): choice as long as you provide the correct `affine_to_rasmm` matrix, at construction time. When applied to streamlines coordinates, that transformation matrix should bring the streamlines back to world space - (RAS+ and mm space) [1]_. + (RAS+ and mm space) [#]_. - Moreover, when streamlines are mapped back to voxel space [2]_, a + Moreover, when streamlines are mapped back to voxel space [#]_, a streamline point located at an integer coordinate (i,j,k) is considered to be at the center of the corresponding voxel. This is in contrast with other conventions where it might have referred to a corner. @@ -553,8 +553,8 @@ class LazyTractogram(Tractogram): References ---------- - [1] http://nipy.org/nibabel/coordinate_systems.html#naming-reference-spaces - [2] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space + .. [#] http://nipy.org/nibabel/coordinate_systems.html#naming-reference-spaces + .. 
[#] http://nipy.org/nibabel/coordinate_systems.html#voxel-coordinates-are-in-voxel-space
     """
 
     def __init__(self, streamlines=None, data_per_streamline=None,
From d4598dbad9f122f7e76769a60f88c6b280ed2634 Mon Sep 17 00:00:00 2001
From: Zvi Baratz
Date: Thu, 20 Feb 2020 17:29:16 +0200
Subject: [PATCH 681/689] Typo fix imagqes --> images

---
 doc/source/dicom/dicom_mosaic.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/source/dicom/dicom_mosaic.rst b/doc/source/dicom/dicom_mosaic.rst
index cf597169d3..7e5a157a94 100644
--- a/doc/source/dicom/dicom_mosaic.rst
+++ b/doc/source/dicom/dicom_mosaic.rst
@@ -67,7 +67,7 @@ The first two values of $\mathbf{s}$ ($s_1, s_2$) are given by the
 ``PixelSpacing`` field. We get $s_3$ (the slice scaling value) from
 ``SpacingBetweenSlices``.
 
-The :ref:`spm-dicom` code has a comment saying that mosaic DICOM imagqes
+The :ref:`spm-dicom` code has a comment saying that mosaic DICOM images
 have an incorrect ``ImagePositionPatient`` field. The
 ``ImagePositionPatient`` field usually gives the $\mathbf{t}$ vector. The
 comments imply that Siemens has derived ``ImagePositionPatient``
From 05662b24f68c3efe386342d7b914eb268876748b Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Fri, 6 Mar 2020 10:43:39 -0500
Subject: [PATCH 682/689] MNT: Update changelog, author list and Zenodo

---
 .zenodo.json         |  3 +++
 Changelog            | 15 +++++++++++++++
 doc/source/index.rst |  1 +
 3 files changed, 19 insertions(+)

diff --git a/.zenodo.json b/.zenodo.json
index e81655fe8f..17cc83715f 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -323,6 +323,9 @@
       "name": "Reddam, Venkateswara Reddy",
       "orcid": "0000-0001-6817-2966"
     },
+    {
+      "name": "Baratz, Zvi"
+    },
     {
       "name": "freec84"
     }
diff --git a/Changelog b/Changelog
index 1209447219..3135ff111f 100644
--- a/Changelog
+++ b/Changelog
@@ -25,6 +25,21 @@ Eric Larson (EL), Demian Wassermann, and Stephan Gerhard.
 References like "pr/298" refer to github pull request numbers.
 
+3.0.2 (Monday 9 March 2020)
+===========================
+
+Bug fixes
+---------
+* Attempt to find versioneer version when building docs (pr/894) (CM)
+* Delay import of h5py until needed (backport of pr/889) (YOH, reviewed by CM)
+
+Maintenance
+-----------
+* Fix typo in documentation (backport of pr/893) (Zvi Baratz, reviewed by CM)
+* Set minimum matplotlib to 1.5.3 to ensure wheels are available on all
+  supported Python versions. (backport of pr/887) (CM)
+
+
 3.0.1 (Monday 27 January 2020)
 ==============================
 
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 82c9606ef5..c57cfcef4e 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -105,6 +105,7 @@ contributed code and discussion (in rough order of appearance):
 * Dorota Jarecka
 * Chris Gorgolewski
 * Benjamin C Darwin
+* Zvi Baratz
 
 License reprise
 ===============
From b2c42090963613883e1bad8a1b3c86e842e28bef Mon Sep 17 00:00:00 2001
From: "Christopher J. 
Markiewicz" Date: Sat, 7 Mar 2020 09:29:36 -0500 Subject: [PATCH 683/689] MNT: Drop pyproject.toml for now --- pyproject.toml | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 3e61cc01f4..0000000000 --- a/pyproject.toml +++ /dev/null @@ -1,3 +0,0 @@ -[build-system] -# Setuptools version should match setup.py; wheel because pip will insert it noisily -requires = ["setuptools >= 30.3.0", "wheel"] From 4220f1183d6abda52d355a8633deb2efb7cd9b49 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 9 Mar 2020 10:06:41 -0400 Subject: [PATCH 684/689] DOC: Note pyproject.toml removal in changelog --- Changelog | 1 + 1 file changed, 1 insertion(+) diff --git a/Changelog b/Changelog index 3135ff111f..d19daad3b6 100644 --- a/Changelog +++ b/Changelog @@ -38,6 +38,7 @@ Maintenance * Fix typo in documentation (backport of pr/893) (Zvi Baratz, reviewed by CM) * Set minimum matplotlib to 1.5.3 to ensure wheels are available on all supported Python versions. (backport of pr/887) (CM) +* Remove ``pyproject.toml`` for now. (issue/859) (CM) 3.0.1 (Monday 27 January 2020) From ff7c27672e01f2af5addbbf3f8c404d73a990678 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 9 Mar 2020 10:13:22 -0400 Subject: [PATCH 685/689] MNT: 3.0.3dev --- nibabel/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/info.py b/nibabel/info.py index d4ebde9960..d3afdbb1b6 100644 --- a/nibabel/info.py +++ b/nibabel/info.py @@ -12,7 +12,7 @@ # development (pre-release) version. _version_major = 3 _version_minor = 0 -_version_micro = 2 +_version_micro = 3 _version_extra = 'dev' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" From 7a62503fd63b19af71f4b01a1211ba6fcd9d7b4f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 9 Mar 2020 10:52:28 -0400 Subject: [PATCH 686/689] NEP29+1y: Drop Python 3.5 --- setup.cfg | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index a180f71c8d..85aebfee7d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,7 +13,6 @@ classifiers = License :: OSI Approved :: MIT License Operating System :: OS Independent Programming Language :: Python - Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 @@ -28,7 +27,7 @@ provides = nisext [options] -python_requires = >=3.5.1 +python_requires = >=3.6 install_requires = numpy >=1.13 packaging >=14.3 From 65d5fc61545f55a50a45a07fbbaeb99c2dbe6bbb Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 9 Mar 2020 10:53:56 -0400 Subject: [PATCH 687/689] CI: Drop 3.5 tests, set min to 3.6, pre-release to 3.8 --- .travis.yml | 27 +++++++++++++-------------- azure-pipelines.yml | 10 ---------- 2 files changed, 13 insertions(+), 24 deletions(-) diff --git a/.travis.yml b/.travis.yml index 001ac74a5d..fe7fcee141 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,40 +21,39 @@ env: - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS" python: - - 3.6 - 3.7 - 3.8 jobs: include: # Basic dependencies only - - python: 3.5 + - python: 3.6 env: - DEPENDS="-r requirements.txt" # Clean install - - python: 3.5 + - python: 3.6 env: - DEPENDS="" - CHECK_TYPE=skiptests # Absolute minimum dependencies - - python: 3.5 + - python: 3.6 env: - SETUP_REQUIRES="setuptools==30.3.0" - DEPENDS="-r min-requirements.txt" # Absolute minimum dependencies plus oldest MPL - - python: 3.5 + - 
python: 3.6 env: - DEPENDS="-r min-requirements.txt matplotlib==1.5.3" # Minimum pydicom dependency - - python: 3.5 + - python: 3.6 env: - DEPENDS="-r min-requirements.txt pydicom==0.9.9 pillow==2.6" # pydicom master branch - - python: 3.5 + - python: 3.6 env: - DEPENDS="numpy git+https://github.com/pydicom/pydicom.git@master" - # test 3.7 against pre-release builds of everything - - python: 3.7 + # test 3.8 against pre-release builds of everything + - python: 3.8 env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" # OSX Python support is basically accidental. Take whatever version we can @@ -67,23 +66,23 @@ jobs: env: - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS" # Test that PyPI installs from source pass - - python: 3.5 + - python: 3.6 env: - INSTALL_TYPE=sdist # Wheels (binary distributions) - - python: 3.5 + - python: 3.6 env: - INSTALL_TYPE=wheel # Install from git archive (e.g., https://github.com/nipy/nibabel/archive/master.zip) - - python: 3.5 + - python: 3.6 env: - INSTALL_TYPE=archive # Run flake8... Might not be needed now we have pep8speaks - - python: 3.5 + - python: 3.6 env: - CHECK_TYPE="style" # Documentation doctests - - python: 3.5 + - python: 3.6 env: - CHECK_TYPE="doc" diff --git a/azure-pipelines.yml b/azure-pipelines.yml index d09c5b7740..b00c54209f 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -6,16 +6,6 @@ jobs: vmImage: windows-2019 matrix: py35-x86: - PYTHON_VERSION: '3.5' - PYTHON_ARCH: 'x86' - py35-x64: - PYTHON_VERSION: '3.5' - PYTHON_ARCH: 'x64' - py35-h5py-check: - PYTHON_VERSION: '3.5' - PYTHON_ARCH: 'x64' - PYTHONHASHSEED: 283137131 - DEPENDS: "h5py==2.9.0" py36-x86: PYTHON_VERSION: '3.6' PYTHON_ARCH: 'x86' From cb8da69460c333094093fdd5c0440930eadfb086 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 2 Mar 2016 22:30:13 -0800 Subject: [PATCH 688/689] ENH: Add parser for Siemens "ASCCONV" text format This format is included in most Siemens DICOM files as well as other MR related files. It contains many important bits of meta data. --- nibabel/nicom/ascconv.py | 60 ++ nibabel/nicom/tests/data/ascconv_sample.txt | 919 ++++++++++++++++++++ nibabel/nicom/tests/test_ascconv.py | 40 + 3 files changed, 1019 insertions(+) create mode 100644 nibabel/nicom/ascconv.py create mode 100644 nibabel/nicom/tests/data/ascconv_sample.txt create mode 100644 nibabel/nicom/tests/test_ascconv.py diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py new file mode 100644 index 0000000000..b63205b2cb --- /dev/null +++ b/nibabel/nicom/ascconv.py @@ -0,0 +1,60 @@ +# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +""" +Parse the "ASCCONV" meta data format found in a variety of Siemens MR files. +""" +import ast, re +from ..externals import OrderedDict + + +ASCCONV_RE = re.compile( + r'### ASCCONV BEGIN((?:\s*[^=\s]+=[^=\s]+)*) ###\n(.*?)\n### ASCCONV END ###', + flags=re.M | re.S) + + +def parse_ascconv(csa_key, ascconv_str): + '''Parse the 'ASCCONV' format from `input_str`. + + Parameters + ---------- + csa_key : str + The key in the CSA dict for the element containing `input_str`. Should + be 'MrPheonixProtocol' or 'MrProtocol'. + ascconv_str : str + The string we are parsing + + Returns + ------- + prot_dict : OrderedDict + Meta data pulled from the ASCCONV section. + attrs : OrderedDict + Any attributes stored in the 'ASCCONV BEGIN' line + + Raises + ------ + SyntaxError + A line of the ASCCONV section could not be parsed. 
+ ''' + attrs, content = ASCCONV_RE.match(ascconv_str).groups() + attrs = OrderedDict((tuple(x.split('=')) for x in attrs.split())) + if csa_key == 'MrPhoenixProtocol': + str_delim = '""' + elif csa_key == 'MrProtocol': + str_delim = '"' + else: + raise ValueError('Unknown protocol key: %s' % csa_key) + # Normalize string start / end markers to something Python understands + content = content.replace(str_delim, '"""') + ascconv_lines = content.split('\n') + # Use Python's own parser to parse modified ASCCONV assignments + tree = ast.parse(content) + + result = OrderedDict() + for statement in tree.body: + assert isinstance(statement, ast.Assign) + value = ast.literal_eval(statement.value) + # Get LHS string from corresponding text line + key = ascconv_lines[statement.lineno - 1].split('=')[0].strip() + result[key] = value + + return result, attrs diff --git a/nibabel/nicom/tests/data/ascconv_sample.txt b/nibabel/nicom/tests/data/ascconv_sample.txt new file mode 100644 index 0000000000..1fd78f788f --- /dev/null +++ b/nibabel/nicom/tests/data/ascconv_sample.txt @@ -0,0 +1,919 @@ +### ASCCONV BEGIN ### +ulVersion = 0x14b44b6 +tSequenceFileName = ""%SiemensSeq%\ep2d_diff"" +tProtocolName = ""CBU+AF8-DTI+AF8-64D+AF8-1A"" +tReferenceImage0 = ""1.3.12.2.1107.5.2.32.35119.2010011420070434054586384"" +tReferenceImage1 = ""1.3.12.2.1107.5.2.32.35119.2010011420070721803086388"" +tReferenceImage2 = ""1.3.12.2.1107.5.2.32.35119.201001142007109937386392"" +ucScanRegionPosValid = 0x1 +ucTablePositioningMode = 0x1 +sProtConsistencyInfo.tBaselineString = ""N4_VB17A_LATEST_20090307"" +sProtConsistencyInfo.tSystemType = ""092"" +sProtConsistencyInfo.flNominalB0 = 2.89362 +sProtConsistencyInfo.flGMax = 26 +sProtConsistencyInfo.flRiseTime = 5.88 +sProtConsistencyInfo.lMaximumNofRxReceiverChannels = 18 +sGRADSPEC.sEddyCompensationX.aflAmplitude[0] = 0.00141208 +sGRADSPEC.sEddyCompensationX.aflAmplitude[1] = 0.000569241 +sGRADSPEC.sEddyCompensationX.aflAmplitude[2] = -0.000514958 +sGRADSPEC.sEddyCompensationX.aflAmplitude[3] = 0.000499075 +sGRADSPEC.sEddyCompensationX.aflAmplitude[4] = 0.000821246 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[0] = 1.81531 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[1] = 0.995025 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[2] = 0.0492598 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[3] = 0.0194645 +sGRADSPEC.sEddyCompensationX.aflTimeConstant[4] = 0.000499659 +sGRADSPEC.sEddyCompensationY.aflAmplitude[0] = 0.00112797 +sGRADSPEC.sEddyCompensationY.aflAmplitude[1] = -0.000565372 +sGRADSPEC.sEddyCompensationY.aflAmplitude[2] = -0.00182913 +sGRADSPEC.sEddyCompensationY.aflAmplitude[3] = -2.65859e-005 +sGRADSPEC.sEddyCompensationY.aflAmplitude[4] = 0.000601077 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[0] = 1.09142 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[1] = 0.661632 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[2] = 0.446457 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[3] = 0.0118729 +sGRADSPEC.sEddyCompensationY.aflTimeConstant[4] = 0.00134346 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[0] = 0.00221038 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[1] = 0.00592667 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[2] = 0.000254437 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[3] = -8.35135e-005 +sGRADSPEC.sEddyCompensationZ.aflAmplitude[4] = -4.25678e-005 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[0] = 4.32108 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[1] = 0.923398 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[2] = 0.0379209 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[3] = 
0.0104227 +sGRADSPEC.sEddyCompensationZ.aflTimeConstant[4] = 0.00199944 +sGRADSPEC.bEddyCompensationValid = 1 +sGRADSPEC.sB0CompensationX.aflAmplitude[0] = -0.0494045 +sGRADSPEC.sB0CompensationX.aflAmplitude[1] = 0.0730311 +sGRADSPEC.sB0CompensationX.aflAmplitude[2] = -0.00670347 +sGRADSPEC.sB0CompensationX.aflTimeConstant[0] = 0.618983 +sGRADSPEC.sB0CompensationX.aflTimeConstant[1] = 0.341914 +sGRADSPEC.sB0CompensationX.aflTimeConstant[2] = 0.002 +sGRADSPEC.sB0CompensationY.aflAmplitude[0] = 0.136281 +sGRADSPEC.sB0CompensationY.aflAmplitude[1] = 0.0376382 +sGRADSPEC.sB0CompensationY.aflAmplitude[2] = -0.0500779 +sGRADSPEC.sB0CompensationY.aflTimeConstant[0] = 0.71999 +sGRADSPEC.sB0CompensationY.aflTimeConstant[1] = 0.00341892 +sGRADSPEC.sB0CompensationY.aflTimeConstant[2] = 0.002 +sGRADSPEC.sB0CompensationZ.aflAmplitude[0] = 0.0776537 +sGRADSPEC.sB0CompensationZ.aflAmplitude[1] = 0.0168151 +sGRADSPEC.sB0CompensationZ.aflAmplitude[2] = -0.0550622 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[0] = 0.669998 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[1] = 0.0213343 +sGRADSPEC.sB0CompensationZ.aflTimeConstant[2] = 0.00186002 +sGRADSPEC.bB0CompensationValid = 1 +sGRADSPEC.sCrossTermCompensationXY.aflAmplitude[0] = -0.00049613 +sGRADSPEC.sCrossTermCompensationXY.aflTimeConstant[0] = 0.562233 +sGRADSPEC.sCrossTermCompensationXZ.aflAmplitude[0] = -0.000499641 +sGRADSPEC.sCrossTermCompensationXZ.aflTimeConstant[0] = 0.693605 +sGRADSPEC.sCrossTermCompensationYX.aflAmplitude[0] = 5.35458e-005 +sGRADSPEC.sCrossTermCompensationYX.aflTimeConstant[0] = 0.598216 +sGRADSPEC.sCrossTermCompensationYZ.aflAmplitude[0] = 0.0004678 +sGRADSPEC.sCrossTermCompensationYZ.aflTimeConstant[0] = 0.705977 +sGRADSPEC.sCrossTermCompensationZX.aflAmplitude[0] = -0.000529382 +sGRADSPEC.sCrossTermCompensationZX.aflTimeConstant[0] = 0.551175 +sGRADSPEC.sCrossTermCompensationZY.aflAmplitude[0] = 8.74925e-005 +sGRADSPEC.sCrossTermCompensationZY.aflTimeConstant[0] = 0.890761 +sGRADSPEC.bCrossTermCompensationValid = 1 +sGRADSPEC.lOffsetX = -7806 +sGRADSPEC.lOffsetY = -8833 +sGRADSPEC.lOffsetZ = -2097 +sGRADSPEC.bOffsetValid = 1 +sGRADSPEC.lDelayX = 14 +sGRADSPEC.lDelayY = 14 +sGRADSPEC.lDelayZ = 10 +sGRADSPEC.bDelayValid = 1 +sGRADSPEC.flSensitivityX = 7.95149e-005 +sGRADSPEC.flSensitivityY = 7.82833e-005 +sGRADSPEC.flSensitivityZ = 9.09015e-005 +sGRADSPEC.bSensitivityValid = 1 +sGRADSPEC.flGSWDMinRiseTime = 9.88 +sGRADSPEC.alShimCurrent[0] = 867 +sGRADSPEC.alShimCurrent[1] = 80 +sGRADSPEC.alShimCurrent[2] = -61 +sGRADSPEC.alShimCurrent[3] = -4 +sGRADSPEC.alShimCurrent[4] = -16 +sGRADSPEC.bShimCurrentValid = 1 +sGRADSPEC.ucMode = 0x11 +sTXSPEC.asNucleusInfo[0].tNucleus = ""1H"" +sTXSPEC.asNucleusInfo[0].lFrequency = 123251815 +sTXSPEC.asNucleusInfo[0].bFrequencyValid = 1 +sTXSPEC.asNucleusInfo[0].flReferenceAmplitude = 384.855 +sTXSPEC.asNucleusInfo[0].bReferenceAmplitudeValid = 1 +sTXSPEC.asNucleusInfo[0].flAmplitudeCorrection = 1 +sTXSPEC.asNucleusInfo[0].bAmplitudeCorrectionValid = 1 +sTXSPEC.asNucleusInfo[0].bRFPAIndexValid = 1 +sTXSPEC.asNucleusInfo[1].bFrequencyValid = 1 +sTXSPEC.asNucleusInfo[1].bReferenceAmplitudeValid = 1 +sTXSPEC.asNucleusInfo[1].flAmplitudeCorrection = 1 +sTXSPEC.asNucleusInfo[1].bAmplitudeCorrectionValid = 1 +sTXSPEC.asNucleusInfo[1].lRFPAIndex = -1 +sTXSPEC.asNucleusInfo[1].bRFPAIndexValid = 1 +sTXSPEC.aRFPULSE[0].tName = ""ExtExciteRF"" +sTXSPEC.aRFPULSE[0].bAmplitudeValid = 0x1 +sTXSPEC.aRFPULSE[0].flAmplitude = 357.891 +sTXSPEC.aRFPULSE[1].tName = ""CSatCSatNS"" +sTXSPEC.aRFPULSE[1].bAmplitudeValid 
= 0x1 +sTXSPEC.aRFPULSE[1].flAmplitude = 94.871 +sTXSPEC.aRFPULSE[2].tName = ""SLoopFCSatNS"" +sTXSPEC.aRFPULSE[2].bAmplitudeValid = 0x1 +sTXSPEC.aRFPULSE[2].flAmplitude = 94.871 +sTXSPEC.lNoOfTraPulses = 3 +sTXSPEC.lBCExcitationMode = 1 +sTXSPEC.lBCSeqExcitationMode = 4 +sTXSPEC.flKDynMagnitudeMin = 0.5 +sTXSPEC.flKDynMagnitudeMax = 1.5 +sTXSPEC.flKDynMagnitudeClipLow = 1 +sTXSPEC.flKDynMagnitudeClipHigh = 1 +sTXSPEC.flKDynPhaseMax = 0.698132 +sTXSPEC.flKDynPhaseClip = 0.174533 +sTXSPEC.bKDynValid = 1 +sTXSPEC.ucRFPulseType = 0x2 +sTXSPEC.ucExcitMode = 0x1 +sTXSPEC.ucSimultaneousExcitation = 0x1 +sTXSPEC.ucBCExcitationModeValid = 0x1 +sRXSPEC.lGain = 1 +sRXSPEC.bGainValid = 1 +sRXSPEC.alDwellTime[0] = 2800 +sAdjData.uiAdjFreMode = 0x1 +sAdjData.uiAdjShimMode = 0x2 +sAdjData.uiAdjWatSupMode = 0x1 +sAdjData.uiAdjRFMapMode = 0x1 +sAdjData.uiAdjMDSMode = 0x1 +sAdjData.uiAdjTableTolerance = 0x1 +sAdjData.uiAdjProtID = 0x56 +sAdjData.uiAdjFreProtRelated = 0x1 +sAdjData.sAdjVolume.sPosition.dCor = -19.66101724 +sAdjData.sAdjVolume.sPosition.dTra = -8.81356001 +sAdjData.sAdjVolume.sNormal.dCor = 0.005235963828 +sAdjData.sAdjVolume.sNormal.dTra = 0.9999862922 +sAdjData.sAdjVolume.dThickness = 144 +sAdjData.sAdjVolume.dPhaseFOV = 230 +sAdjData.sAdjVolume.dReadoutFOV = 230 +ucEnableNoiseAdjust = 0x1 +alTR[0] = 6600000 +alTI[0] = 2500000 +lContrasts = 1 +alTE[0] = 93000 +acFlowComp[0] = 1 +lCombinedEchoes = 1 +sSliceArray.asSlice[0].sPosition.dCor = -20.03015269 +sSliceArray.asSlice[0].sPosition.dTra = -79.31259361 +sSliceArray.asSlice[0].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[0].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[0].dThickness = 2.5 +sSliceArray.asSlice[0].dPhaseFOV = 230 +sSliceArray.asSlice[0].dReadoutFOV = 230 +sSliceArray.asSlice[1].sPosition.dCor = -20.0144448 +sSliceArray.asSlice[1].sPosition.dTra = -76.31263473 +sSliceArray.asSlice[1].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[1].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[1].dThickness = 2.5 +sSliceArray.asSlice[1].dPhaseFOV = 230 +sSliceArray.asSlice[1].dReadoutFOV = 230 +sSliceArray.asSlice[2].sPosition.dCor = -19.99873691 +sSliceArray.asSlice[2].sPosition.dTra = -73.31267586 +sSliceArray.asSlice[2].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[2].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[2].dThickness = 2.5 +sSliceArray.asSlice[2].dPhaseFOV = 230 +sSliceArray.asSlice[2].dReadoutFOV = 230 +sSliceArray.asSlice[3].sPosition.dCor = -19.98302902 +sSliceArray.asSlice[3].sPosition.dTra = -70.31271698 +sSliceArray.asSlice[3].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[3].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[3].dThickness = 2.5 +sSliceArray.asSlice[3].dPhaseFOV = 230 +sSliceArray.asSlice[3].dReadoutFOV = 230 +sSliceArray.asSlice[4].sPosition.dCor = -19.96732113 +sSliceArray.asSlice[4].sPosition.dTra = -67.3127581 +sSliceArray.asSlice[4].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[4].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[4].dThickness = 2.5 +sSliceArray.asSlice[4].dPhaseFOV = 230 +sSliceArray.asSlice[4].dReadoutFOV = 230 +sSliceArray.asSlice[5].sPosition.dCor = -19.95161324 +sSliceArray.asSlice[5].sPosition.dTra = -64.31279923 +sSliceArray.asSlice[5].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[5].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[5].dThickness = 2.5 +sSliceArray.asSlice[5].dPhaseFOV = 230 +sSliceArray.asSlice[5].dReadoutFOV = 230 +sSliceArray.asSlice[6].sPosition.dCor = -19.93590535 +sSliceArray.asSlice[6].sPosition.dTra = -61.31284035 
+sSliceArray.asSlice[6].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[6].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[6].dThickness = 2.5 +sSliceArray.asSlice[6].dPhaseFOV = 230 +sSliceArray.asSlice[6].dReadoutFOV = 230 +sSliceArray.asSlice[7].sPosition.dCor = -19.92019745 +sSliceArray.asSlice[7].sPosition.dTra = -58.31288147 +sSliceArray.asSlice[7].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[7].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[7].dThickness = 2.5 +sSliceArray.asSlice[7].dPhaseFOV = 230 +sSliceArray.asSlice[7].dReadoutFOV = 230 +sSliceArray.asSlice[8].sPosition.dCor = -19.90448956 +sSliceArray.asSlice[8].sPosition.dTra = -55.3129226 +sSliceArray.asSlice[8].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[8].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[8].dThickness = 2.5 +sSliceArray.asSlice[8].dPhaseFOV = 230 +sSliceArray.asSlice[8].dReadoutFOV = 230 +sSliceArray.asSlice[9].sPosition.dCor = -19.88878167 +sSliceArray.asSlice[9].sPosition.dTra = -52.31296372 +sSliceArray.asSlice[9].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[9].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[9].dThickness = 2.5 +sSliceArray.asSlice[9].dPhaseFOV = 230 +sSliceArray.asSlice[9].dReadoutFOV = 230 +sSliceArray.asSlice[10].sPosition.dCor = -19.87307378 +sSliceArray.asSlice[10].sPosition.dTra = -49.31300484 +sSliceArray.asSlice[10].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[10].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[10].dThickness = 2.5 +sSliceArray.asSlice[10].dPhaseFOV = 230 +sSliceArray.asSlice[10].dReadoutFOV = 230 +sSliceArray.asSlice[11].sPosition.dCor = -19.85736589 +sSliceArray.asSlice[11].sPosition.dTra = -46.31304597 +sSliceArray.asSlice[11].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[11].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[11].dThickness = 2.5 +sSliceArray.asSlice[11].dPhaseFOV = 230 +sSliceArray.asSlice[11].dReadoutFOV = 230 +sSliceArray.asSlice[12].sPosition.dCor = -19.841658 +sSliceArray.asSlice[12].sPosition.dTra = -43.31308709 +sSliceArray.asSlice[12].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[12].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[12].dThickness = 2.5 +sSliceArray.asSlice[12].dPhaseFOV = 230 +sSliceArray.asSlice[12].dReadoutFOV = 230 +sSliceArray.asSlice[13].sPosition.dCor = -19.8259501 +sSliceArray.asSlice[13].sPosition.dTra = -40.31312821 +sSliceArray.asSlice[13].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[13].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[13].dThickness = 2.5 +sSliceArray.asSlice[13].dPhaseFOV = 230 +sSliceArray.asSlice[13].dReadoutFOV = 230 +sSliceArray.asSlice[14].sPosition.dCor = -19.81024221 +sSliceArray.asSlice[14].sPosition.dTra = -37.31316934 +sSliceArray.asSlice[14].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[14].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[14].dThickness = 2.5 +sSliceArray.asSlice[14].dPhaseFOV = 230 +sSliceArray.asSlice[14].dReadoutFOV = 230 +sSliceArray.asSlice[15].sPosition.dCor = -19.79453432 +sSliceArray.asSlice[15].sPosition.dTra = -34.31321046 +sSliceArray.asSlice[15].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[15].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[15].dThickness = 2.5 +sSliceArray.asSlice[15].dPhaseFOV = 230 +sSliceArray.asSlice[15].dReadoutFOV = 230 +sSliceArray.asSlice[16].sPosition.dCor = -19.77882643 +sSliceArray.asSlice[16].sPosition.dTra = -31.31325158 +sSliceArray.asSlice[16].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[16].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[16].dThickness 
= 2.5 +sSliceArray.asSlice[16].dPhaseFOV = 230 +sSliceArray.asSlice[16].dReadoutFOV = 230 +sSliceArray.asSlice[17].sPosition.dCor = -19.76311854 +sSliceArray.asSlice[17].sPosition.dTra = -28.31329271 +sSliceArray.asSlice[17].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[17].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[17].dThickness = 2.5 +sSliceArray.asSlice[17].dPhaseFOV = 230 +sSliceArray.asSlice[17].dReadoutFOV = 230 +sSliceArray.asSlice[18].sPosition.dCor = -19.74741065 +sSliceArray.asSlice[18].sPosition.dTra = -25.31333383 +sSliceArray.asSlice[18].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[18].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[18].dThickness = 2.5 +sSliceArray.asSlice[18].dPhaseFOV = 230 +sSliceArray.asSlice[18].dReadoutFOV = 230 +sSliceArray.asSlice[19].sPosition.dCor = -19.73170276 +sSliceArray.asSlice[19].sPosition.dTra = -22.31337495 +sSliceArray.asSlice[19].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[19].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[19].dThickness = 2.5 +sSliceArray.asSlice[19].dPhaseFOV = 230 +sSliceArray.asSlice[19].dReadoutFOV = 230 +sSliceArray.asSlice[20].sPosition.dCor = -19.71599486 +sSliceArray.asSlice[20].sPosition.dTra = -19.31341608 +sSliceArray.asSlice[20].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[20].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[20].dThickness = 2.5 +sSliceArray.asSlice[20].dPhaseFOV = 230 +sSliceArray.asSlice[20].dReadoutFOV = 230 +sSliceArray.asSlice[21].sPosition.dCor = -19.70028697 +sSliceArray.asSlice[21].sPosition.dTra = -16.3134572 +sSliceArray.asSlice[21].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[21].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[21].dThickness = 2.5 +sSliceArray.asSlice[21].dPhaseFOV = 230 +sSliceArray.asSlice[21].dReadoutFOV = 230 +sSliceArray.asSlice[22].sPosition.dCor = -19.68457908 +sSliceArray.asSlice[22].sPosition.dTra = -13.31349832 +sSliceArray.asSlice[22].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[22].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[22].dThickness = 2.5 +sSliceArray.asSlice[22].dPhaseFOV = 230 +sSliceArray.asSlice[22].dReadoutFOV = 230 +sSliceArray.asSlice[23].sPosition.dCor = -19.66887119 +sSliceArray.asSlice[23].sPosition.dTra = -10.31353945 +sSliceArray.asSlice[23].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[23].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[23].dThickness = 2.5 +sSliceArray.asSlice[23].dPhaseFOV = 230 +sSliceArray.asSlice[23].dReadoutFOV = 230 +sSliceArray.asSlice[24].sPosition.dCor = -19.6531633 +sSliceArray.asSlice[24].sPosition.dTra = -7.313580571 +sSliceArray.asSlice[24].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[24].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[24].dThickness = 2.5 +sSliceArray.asSlice[24].dPhaseFOV = 230 +sSliceArray.asSlice[24].dReadoutFOV = 230 +sSliceArray.asSlice[25].sPosition.dCor = -19.63745541 +sSliceArray.asSlice[25].sPosition.dTra = -4.313621695 +sSliceArray.asSlice[25].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[25].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[25].dThickness = 2.5 +sSliceArray.asSlice[25].dPhaseFOV = 230 +sSliceArray.asSlice[25].dReadoutFOV = 230 +sSliceArray.asSlice[26].sPosition.dCor = -19.62174752 +sSliceArray.asSlice[26].sPosition.dTra = -1.313662818 +sSliceArray.asSlice[26].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[26].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[26].dThickness = 2.5 +sSliceArray.asSlice[26].dPhaseFOV = 230 +sSliceArray.asSlice[26].dReadoutFOV = 230 
+sSliceArray.asSlice[27].sPosition.dCor = -19.60603962 +sSliceArray.asSlice[27].sPosition.dTra = 1.686296059 +sSliceArray.asSlice[27].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[27].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[27].dThickness = 2.5 +sSliceArray.asSlice[27].dPhaseFOV = 230 +sSliceArray.asSlice[27].dReadoutFOV = 230 +sSliceArray.asSlice[28].sPosition.dCor = -19.59033173 +sSliceArray.asSlice[28].sPosition.dTra = 4.686254935 +sSliceArray.asSlice[28].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[28].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[28].dThickness = 2.5 +sSliceArray.asSlice[28].dPhaseFOV = 230 +sSliceArray.asSlice[28].dReadoutFOV = 230 +sSliceArray.asSlice[29].sPosition.dCor = -19.57462384 +sSliceArray.asSlice[29].sPosition.dTra = 7.686213812 +sSliceArray.asSlice[29].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[29].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[29].dThickness = 2.5 +sSliceArray.asSlice[29].dPhaseFOV = 230 +sSliceArray.asSlice[29].dReadoutFOV = 230 +sSliceArray.asSlice[30].sPosition.dCor = -19.55891595 +sSliceArray.asSlice[30].sPosition.dTra = 10.68617269 +sSliceArray.asSlice[30].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[30].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[30].dThickness = 2.5 +sSliceArray.asSlice[30].dPhaseFOV = 230 +sSliceArray.asSlice[30].dReadoutFOV = 230 +sSliceArray.asSlice[31].sPosition.dCor = -19.54320806 +sSliceArray.asSlice[31].sPosition.dTra = 13.68613156 +sSliceArray.asSlice[31].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[31].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[31].dThickness = 2.5 +sSliceArray.asSlice[31].dPhaseFOV = 230 +sSliceArray.asSlice[31].dReadoutFOV = 230 +sSliceArray.asSlice[32].sPosition.dCor = -19.52750017 +sSliceArray.asSlice[32].sPosition.dTra = 16.68609044 +sSliceArray.asSlice[32].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[32].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[32].dThickness = 2.5 +sSliceArray.asSlice[32].dPhaseFOV = 230 +sSliceArray.asSlice[32].dReadoutFOV = 230 +sSliceArray.asSlice[33].sPosition.dCor = -19.51179228 +sSliceArray.asSlice[33].sPosition.dTra = 19.68604932 +sSliceArray.asSlice[33].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[33].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[33].dThickness = 2.5 +sSliceArray.asSlice[33].dPhaseFOV = 230 +sSliceArray.asSlice[33].dReadoutFOV = 230 +sSliceArray.asSlice[34].sPosition.dCor = -19.49608438 +sSliceArray.asSlice[34].sPosition.dTra = 22.68600819 +sSliceArray.asSlice[34].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[34].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[34].dThickness = 2.5 +sSliceArray.asSlice[34].dPhaseFOV = 230 +sSliceArray.asSlice[34].dReadoutFOV = 230 +sSliceArray.asSlice[35].sPosition.dCor = -19.48037649 +sSliceArray.asSlice[35].sPosition.dTra = 25.68596707 +sSliceArray.asSlice[35].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[35].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[35].dThickness = 2.5 +sSliceArray.asSlice[35].dPhaseFOV = 230 +sSliceArray.asSlice[35].dReadoutFOV = 230 +sSliceArray.asSlice[36].sPosition.dCor = -19.4646686 +sSliceArray.asSlice[36].sPosition.dTra = 28.68592595 +sSliceArray.asSlice[36].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[36].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[36].dThickness = 2.5 +sSliceArray.asSlice[36].dPhaseFOV = 230 +sSliceArray.asSlice[36].dReadoutFOV = 230 +sSliceArray.asSlice[37].sPosition.dCor = -19.44896071 +sSliceArray.asSlice[37].sPosition.dTra = 31.68588482 
+sSliceArray.asSlice[37].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[37].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[37].dThickness = 2.5 +sSliceArray.asSlice[37].dPhaseFOV = 230 +sSliceArray.asSlice[37].dReadoutFOV = 230 +sSliceArray.asSlice[38].sPosition.dCor = -19.43325282 +sSliceArray.asSlice[38].sPosition.dTra = 34.6858437 +sSliceArray.asSlice[38].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[38].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[38].dThickness = 2.5 +sSliceArray.asSlice[38].dPhaseFOV = 230 +sSliceArray.asSlice[38].dReadoutFOV = 230 +sSliceArray.asSlice[39].sPosition.dCor = -19.41754493 +sSliceArray.asSlice[39].sPosition.dTra = 37.68580258 +sSliceArray.asSlice[39].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[39].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[39].dThickness = 2.5 +sSliceArray.asSlice[39].dPhaseFOV = 230 +sSliceArray.asSlice[39].dReadoutFOV = 230 +sSliceArray.asSlice[40].sPosition.dCor = -19.40183703 +sSliceArray.asSlice[40].sPosition.dTra = 40.68576145 +sSliceArray.asSlice[40].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[40].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[40].dThickness = 2.5 +sSliceArray.asSlice[40].dPhaseFOV = 230 +sSliceArray.asSlice[40].dReadoutFOV = 230 +sSliceArray.asSlice[41].sPosition.dCor = -19.38612914 +sSliceArray.asSlice[41].sPosition.dTra = 43.68572033 +sSliceArray.asSlice[41].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[41].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[41].dThickness = 2.5 +sSliceArray.asSlice[41].dPhaseFOV = 230 +sSliceArray.asSlice[41].dReadoutFOV = 230 +sSliceArray.asSlice[42].sPosition.dCor = -19.37042125 +sSliceArray.asSlice[42].sPosition.dTra = 46.68567921 +sSliceArray.asSlice[42].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[42].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[42].dThickness = 2.5 +sSliceArray.asSlice[42].dPhaseFOV = 230 +sSliceArray.asSlice[42].dReadoutFOV = 230 +sSliceArray.asSlice[43].sPosition.dCor = -19.35471336 +sSliceArray.asSlice[43].sPosition.dTra = 49.68563808 +sSliceArray.asSlice[43].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[43].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[43].dThickness = 2.5 +sSliceArray.asSlice[43].dPhaseFOV = 230 +sSliceArray.asSlice[43].dReadoutFOV = 230 +sSliceArray.asSlice[44].sPosition.dCor = -19.33900547 +sSliceArray.asSlice[44].sPosition.dTra = 52.68559696 +sSliceArray.asSlice[44].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[44].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[44].dThickness = 2.5 +sSliceArray.asSlice[44].dPhaseFOV = 230 +sSliceArray.asSlice[44].dReadoutFOV = 230 +sSliceArray.asSlice[45].sPosition.dCor = -19.32329758 +sSliceArray.asSlice[45].sPosition.dTra = 55.68555584 +sSliceArray.asSlice[45].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[45].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[45].dThickness = 2.5 +sSliceArray.asSlice[45].dPhaseFOV = 230 +sSliceArray.asSlice[45].dReadoutFOV = 230 +sSliceArray.asSlice[46].sPosition.dCor = -19.30758969 +sSliceArray.asSlice[46].sPosition.dTra = 58.68551471 +sSliceArray.asSlice[46].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[46].sNormal.dTra = 0.9999862922 +sSliceArray.asSlice[46].dThickness = 2.5 +sSliceArray.asSlice[46].dPhaseFOV = 230 +sSliceArray.asSlice[46].dReadoutFOV = 230 +sSliceArray.asSlice[47].sPosition.dCor = -19.29188179 +sSliceArray.asSlice[47].sPosition.dTra = 61.68547359 +sSliceArray.asSlice[47].sNormal.dCor = 0.005235963828 +sSliceArray.asSlice[47].sNormal.dTra = 0.9999862922 
+sSliceArray.asSlice[47].dThickness = 2.5 +sSliceArray.asSlice[47].dPhaseFOV = 230 +sSliceArray.asSlice[47].dReadoutFOV = 230 +sSliceArray.anAsc[1] = 1 +sSliceArray.anAsc[2] = 2 +sSliceArray.anAsc[3] = 3 +sSliceArray.anAsc[4] = 4 +sSliceArray.anAsc[5] = 5 +sSliceArray.anAsc[6] = 6 +sSliceArray.anAsc[7] = 7 +sSliceArray.anAsc[8] = 8 +sSliceArray.anAsc[9] = 9 +sSliceArray.anAsc[10] = 10 +sSliceArray.anAsc[11] = 11 +sSliceArray.anAsc[12] = 12 +sSliceArray.anAsc[13] = 13 +sSliceArray.anAsc[14] = 14 +sSliceArray.anAsc[15] = 15 +sSliceArray.anAsc[16] = 16 +sSliceArray.anAsc[17] = 17 +sSliceArray.anAsc[18] = 18 +sSliceArray.anAsc[19] = 19 +sSliceArray.anAsc[20] = 20 +sSliceArray.anAsc[21] = 21 +sSliceArray.anAsc[22] = 22 +sSliceArray.anAsc[23] = 23 +sSliceArray.anAsc[24] = 24 +sSliceArray.anAsc[25] = 25 +sSliceArray.anAsc[26] = 26 +sSliceArray.anAsc[27] = 27 +sSliceArray.anAsc[28] = 28 +sSliceArray.anAsc[29] = 29 +sSliceArray.anAsc[30] = 30 +sSliceArray.anAsc[31] = 31 +sSliceArray.anAsc[32] = 32 +sSliceArray.anAsc[33] = 33 +sSliceArray.anAsc[34] = 34 +sSliceArray.anAsc[35] = 35 +sSliceArray.anAsc[36] = 36 +sSliceArray.anAsc[37] = 37 +sSliceArray.anAsc[38] = 38 +sSliceArray.anAsc[39] = 39 +sSliceArray.anAsc[40] = 40 +sSliceArray.anAsc[41] = 41 +sSliceArray.anAsc[42] = 42 +sSliceArray.anAsc[43] = 43 +sSliceArray.anAsc[44] = 44 +sSliceArray.anAsc[45] = 45 +sSliceArray.anAsc[46] = 46 +sSliceArray.anAsc[47] = 47 +sSliceArray.anPos[1] = 1 +sSliceArray.anPos[2] = 2 +sSliceArray.anPos[3] = 3 +sSliceArray.anPos[4] = 4 +sSliceArray.anPos[5] = 5 +sSliceArray.anPos[6] = 6 +sSliceArray.anPos[7] = 7 +sSliceArray.anPos[8] = 8 +sSliceArray.anPos[9] = 9 +sSliceArray.anPos[10] = 10 +sSliceArray.anPos[11] = 11 +sSliceArray.anPos[12] = 12 +sSliceArray.anPos[13] = 13 +sSliceArray.anPos[14] = 14 +sSliceArray.anPos[15] = 15 +sSliceArray.anPos[16] = 16 +sSliceArray.anPos[17] = 17 +sSliceArray.anPos[18] = 18 +sSliceArray.anPos[19] = 19 +sSliceArray.anPos[20] = 20 +sSliceArray.anPos[21] = 21 +sSliceArray.anPos[22] = 22 +sSliceArray.anPos[23] = 23 +sSliceArray.anPos[24] = 24 +sSliceArray.anPos[25] = 25 +sSliceArray.anPos[26] = 26 +sSliceArray.anPos[27] = 27 +sSliceArray.anPos[28] = 28 +sSliceArray.anPos[29] = 29 +sSliceArray.anPos[30] = 30 +sSliceArray.anPos[31] = 31 +sSliceArray.anPos[32] = 32 +sSliceArray.anPos[33] = 33 +sSliceArray.anPos[34] = 34 +sSliceArray.anPos[35] = 35 +sSliceArray.anPos[36] = 36 +sSliceArray.anPos[37] = 37 +sSliceArray.anPos[38] = 38 +sSliceArray.anPos[39] = 39 +sSliceArray.anPos[40] = 40 +sSliceArray.anPos[41] = 41 +sSliceArray.anPos[42] = 42 +sSliceArray.anPos[43] = 43 +sSliceArray.anPos[44] = 44 +sSliceArray.anPos[45] = 45 +sSliceArray.anPos[46] = 46 +sSliceArray.anPos[47] = 47 +sSliceArray.lSize = 48 +sSliceArray.lConc = 1 +sSliceArray.ucMode = 0x2 +sSliceArray.sTSat.dThickness = 50 +sGroupArray.asGroup[0].nSize = 48 +sGroupArray.asGroup[0].dDistFact = 0.2 +sGroupArray.anMember[1] = 1 +sGroupArray.anMember[2] = 2 +sGroupArray.anMember[3] = 3 +sGroupArray.anMember[4] = 4 +sGroupArray.anMember[5] = 5 +sGroupArray.anMember[6] = 6 +sGroupArray.anMember[7] = 7 +sGroupArray.anMember[8] = 8 +sGroupArray.anMember[9] = 9 +sGroupArray.anMember[10] = 10 +sGroupArray.anMember[11] = 11 +sGroupArray.anMember[12] = 12 +sGroupArray.anMember[13] = 13 +sGroupArray.anMember[14] = 14 +sGroupArray.anMember[15] = 15 +sGroupArray.anMember[16] = 16 +sGroupArray.anMember[17] = 17 +sGroupArray.anMember[18] = 18 +sGroupArray.anMember[19] = 19 +sGroupArray.anMember[20] = 20 +sGroupArray.anMember[21] = 21 
+sGroupArray.anMember[22] = 22 +sGroupArray.anMember[23] = 23 +sGroupArray.anMember[24] = 24 +sGroupArray.anMember[25] = 25 +sGroupArray.anMember[26] = 26 +sGroupArray.anMember[27] = 27 +sGroupArray.anMember[28] = 28 +sGroupArray.anMember[29] = 29 +sGroupArray.anMember[30] = 30 +sGroupArray.anMember[31] = 31 +sGroupArray.anMember[32] = 32 +sGroupArray.anMember[33] = 33 +sGroupArray.anMember[34] = 34 +sGroupArray.anMember[35] = 35 +sGroupArray.anMember[36] = 36 +sGroupArray.anMember[37] = 37 +sGroupArray.anMember[38] = 38 +sGroupArray.anMember[39] = 39 +sGroupArray.anMember[40] = 40 +sGroupArray.anMember[41] = 41 +sGroupArray.anMember[42] = 42 +sGroupArray.anMember[43] = 43 +sGroupArray.anMember[44] = 44 +sGroupArray.anMember[45] = 45 +sGroupArray.anMember[46] = 46 +sGroupArray.anMember[47] = 47 +sGroupArray.anMember[48] = -1 +sGroupArray.lSize = 1 +sGroupArray.sPSat.dThickness = 50 +sGroupArray.sPSat.dGap = 10 +sAutoAlign.dAAMatrix[0] = 1 +sAutoAlign.dAAMatrix[5] = 1 +sAutoAlign.dAAMatrix[10] = 1 +sAutoAlign.dAAMatrix[15] = 1 +sNavigatorPara.lBreathHoldMeas = 1 +sNavigatorPara.lRespComp = 4 +sNavigatorPara.alFree[22] = 2 +sNavigatorPara.adFree[13] = 150000 +sBladePara.dBladeCoverage = 100 +sBladePara.ucMotionCorr = 0x2 +sPrepPulses.ucFatSat = 0x1 +sPrepPulses.ucWaterSat = 0x4 +sPrepPulses.ucInversion = 0x4 +sPrepPulses.ucSatRecovery = 0x1 +sPrepPulses.ucT2Prep = 0x1 +sPrepPulses.ucTIScout = 0x1 +sPrepPulses.ucFatSatMode = 0x2 +sPrepPulses.dDarkBloodThickness = 200 +sPrepPulses.dDarkBloodFlipAngle = 200 +sPrepPulses.dT2PrepDuration = 40 +sPrepPulses.dIRPulseThicknessFactor = 0.77 +sKSpace.dPhaseResolution = 1 +sKSpace.dSliceResolution = 1 +sKSpace.dAngioDynCentralRegionA = 20 +sKSpace.dAngioDynSamplingDensityB = 25 +sKSpace.lBaseResolution = 128 +sKSpace.lPhaseEncodingLines = 128 +sKSpace.lPartitions = 64 +sKSpace.lImagesPerSlab = 64 +sKSpace.lRadialViews = 64 +sKSpace.lRadialInterleavesPerImage = 2 +sKSpace.lLinesPerShot = 1 +sKSpace.unReordering = 0x1 +sKSpace.dSeqPhasePartialFourierForSNR = 1 +sKSpace.ucPhasePartialFourier = 0x4 +sKSpace.ucSlicePartialFourier = 0x10 +sKSpace.ucAveragingMode = 0x2 +sKSpace.ucMultiSliceMode = 0x2 +sKSpace.ucDimension = 0x2 +sKSpace.ucTrajectory = 0x1 +sKSpace.ucViewSharing = 0x1 +sKSpace.ucAsymmetricEchoMode = 0x1 +sKSpace.ucPOCS = 0x1 +sFastImaging.lEPIFactor = 128 +sFastImaging.lTurboFactor = 1 +sFastImaging.lSliceTurboFactor = 1 +sFastImaging.lSegments = 1 +sFastImaging.ulEnableRFSpoiling = 0x1 +sFastImaging.ucSegmentationMode = 0x1 +sFastImaging.lShots = 1 +sFastImaging.lEchoTrainDuration = 700 +sPhysioImaging.lSignal1 = 1 +sPhysioImaging.lMethod1 = 1 +sPhysioImaging.lSignal2 = 1 +sPhysioImaging.lMethod2 = 1 +sPhysioImaging.lPhases = 1 +sPhysioImaging.lRetroGatedImages = 16 +sPhysioImaging.sPhysioECG.lTriggerPulses = 1 +sPhysioImaging.sPhysioECG.lTriggerWindow = 5 +sPhysioImaging.sPhysioECG.lArrhythmiaDetection = 1 +sPhysioImaging.sPhysioECG.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioECG.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioECG.lTriggerIntervals = 1 +sPhysioImaging.sPhysioPulse.lTriggerPulses = 1 +sPhysioImaging.sPhysioPulse.lTriggerWindow = 5 +sPhysioImaging.sPhysioPulse.lArrhythmiaDetection = 1 +sPhysioImaging.sPhysioPulse.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioPulse.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioPulse.lTriggerIntervals = 1 +sPhysioImaging.sPhysioExt.lTriggerPulses = 1 +sPhysioImaging.sPhysioExt.lTriggerWindow = 5 +sPhysioImaging.sPhysioExt.lArrhythmiaDetection = 1 
+sPhysioImaging.sPhysioExt.lCardiacGateOnThreshold = 100000 +sPhysioImaging.sPhysioExt.lCardiacGateOffThreshold = 700000 +sPhysioImaging.sPhysioExt.lTriggerIntervals = 1 +sPhysioImaging.sPhysioResp.lRespGateThreshold = 20 +sPhysioImaging.sPhysioResp.lRespGatePhase = 2 +sPhysioImaging.sPhysioResp.dGatingRatio = 0.3 +sPhysioImaging.sPhysioNative.ucMode = 0x1 +sPhysioImaging.sPhysioNative.ucFlowSenMode = 0x1 +sSpecPara.lPhaseCyclingType = 1 +sSpecPara.lPhaseEncodingType = 1 +sSpecPara.lRFExcitationBandwidth = 1 +sSpecPara.ucRemoveOversampling = 0x1 +sSpecPara.lAutoRefScanNo = 1 +sSpecPara.lDecouplingType = 1 +sSpecPara.lNOEType = 1 +sSpecPara.lExcitationType = 1 +sSpecPara.lSpecAppl = 1 +sSpecPara.lSpectralSuppression = 1 +sDiffusion.lDiffWeightings = 2 +sDiffusion.alBValue[1] = 1000 +sDiffusion.lNoiseLevel = 40 +sDiffusion.lDiffDirections = 64 +sDiffusion.ulMode = 0x100 +sAngio.ucPCFlowMode = 0x2 +sAngio.ucTOFInflow = 0x4 +sAngio.lDynamicReconMode = 1 +sAngio.lTemporalInterpolation = 1 +sRawFilter.lSlope_256 = 25 +sRawFilter.ucOn = 0x1 +sRawFilter.ucMode = 0x1 +sDistortionCorrFilter.ucMode = 0x1 +sPat.lAccelFactPE = 2 +sPat.lAccelFact3D = 1 +sPat.lRefLinesPE = 38 +sPat.ucPATMode = 0x2 +sPat.ucRefScanMode = 0x4 +sPat.ucTPatAverageAllFrames = 0x1 +sMDS.ulMdsModeMask = 0x1 +sMDS.ulMdsVariableResolution = 0x1 +sMDS.lTableSpeedNumerator = 1 +sMDS.lmdsLinesPerSegment = 15 +sMDS.sMdsEndPosSBCS_mm.dTra = 600 +sMDS.ulMdsReconMode = 0x1 +sMDS.dMdsRangeExtension = 600 +ucEnableIntro = 0x1 +ucDisableChangeStoreImages = 0x1 +ucAAMode = 0x1 +ucAARegionMode = 1 +ucAARefMode = 1 +ucReconstructionMode = 0x1 +ucOneSeriesForAllMeas = 0x1 +ucPHAPSMode = 0x1 +ucDixon = 0x1 +ucDixonSaveOriginal = 0x1 +ucWaitForPrepareCompletion = 0x1 +lAverages = 1 +dAveragesDouble = 1 +adFlipAngleDegree[0] = 90 +lScanTimeSec = 449 +lTotalScanTimeSec = 450 +dRefSNR = 33479.60771 +dRefSNR_VOI = 33479.60771 +tdefaultEVAProt = ""%SiemensEvaDefProt%\DTI\DTI.evp"" +asCoilSelectMeas[0].tNucleus = ""1H"" +asCoilSelectMeas[0].iUsedRFactor = 3 +asCoilSelectMeas[0].asList[0].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[0].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[0].sCoilElementID.tElement = ""H3P"" +asCoilSelectMeas[0].asList[0].lElementSelected = 1 +asCoilSelectMeas[0].asList[0].lRxChannelConnected = 1 +asCoilSelectMeas[0].asList[1].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[1].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[1].sCoilElementID.tElement = ""H4P"" +asCoilSelectMeas[0].asList[1].lElementSelected = 1 +asCoilSelectMeas[0].asList[1].lRxChannelConnected = 2 +asCoilSelectMeas[0].asList[2].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[2].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[2].sCoilElementID.tElement = ""H4S"" +asCoilSelectMeas[0].asList[2].lElementSelected = 1 +asCoilSelectMeas[0].asList[2].lRxChannelConnected = 3 +asCoilSelectMeas[0].asList[3].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[3].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[3].sCoilElementID.tElement = ""H4T"" +asCoilSelectMeas[0].asList[3].lElementSelected = 1 +asCoilSelectMeas[0].asList[3].lRxChannelConnected = 4 +asCoilSelectMeas[0].asList[4].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[4].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[4].sCoilElementID.tElement = ""H3S"" +asCoilSelectMeas[0].asList[4].lElementSelected = 1 +asCoilSelectMeas[0].asList[4].lRxChannelConnected = 5 
+asCoilSelectMeas[0].asList[5].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[5].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[5].sCoilElementID.tElement = ""H3T"" +asCoilSelectMeas[0].asList[5].lElementSelected = 1 +asCoilSelectMeas[0].asList[5].lRxChannelConnected = 6 +asCoilSelectMeas[0].asList[6].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[6].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[6].sCoilElementID.tElement = ""H1P"" +asCoilSelectMeas[0].asList[6].lElementSelected = 1 +asCoilSelectMeas[0].asList[6].lRxChannelConnected = 7 +asCoilSelectMeas[0].asList[7].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[7].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[7].sCoilElementID.tElement = ""H2P"" +asCoilSelectMeas[0].asList[7].lElementSelected = 1 +asCoilSelectMeas[0].asList[7].lRxChannelConnected = 8 +asCoilSelectMeas[0].asList[8].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[8].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[8].sCoilElementID.tElement = ""H2S"" +asCoilSelectMeas[0].asList[8].lElementSelected = 1 +asCoilSelectMeas[0].asList[8].lRxChannelConnected = 9 +asCoilSelectMeas[0].asList[9].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[9].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[9].sCoilElementID.tElement = ""H2T"" +asCoilSelectMeas[0].asList[9].lElementSelected = 1 +asCoilSelectMeas[0].asList[9].lRxChannelConnected = 10 +asCoilSelectMeas[0].asList[10].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[10].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[10].sCoilElementID.tElement = ""H1S"" +asCoilSelectMeas[0].asList[10].lElementSelected = 1 +asCoilSelectMeas[0].asList[10].lRxChannelConnected = 11 +asCoilSelectMeas[0].asList[11].sCoilElementID.tCoilID = ""HeadMatrix"" +asCoilSelectMeas[0].asList[11].sCoilElementID.lCoilCopy = 1 +asCoilSelectMeas[0].asList[11].sCoilElementID.tElement = ""H1T"" +asCoilSelectMeas[0].asList[11].lElementSelected = 1 +asCoilSelectMeas[0].asList[11].lRxChannelConnected = 12 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[0] = 0xff +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[1] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[2] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[3] = 0xad +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[4] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[5] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[6] = 0x5d +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[7] = 0xb1 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[8] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[9] = 0xb2 +asCoilSelectMeas[0].sCOILPLUGS.aulPlugId[10] = 0xee +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[0] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[1] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[2] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[3] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[4] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[5] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[6] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[7] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[8] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[9] = 0x2 +asCoilSelectMeas[0].sCOILPLUGS.auiNmbrOfNibbles[10] = 0x2 +asCoilSelectMeas[0].aFFT_SCALE[0].flFactor = 3.77259 +asCoilSelectMeas[0].aFFT_SCALE[0].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[0].lRxChannel = 1 +asCoilSelectMeas[0].aFFT_SCALE[1].flFactor = 3.83164 
+asCoilSelectMeas[0].aFFT_SCALE[1].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[1].lRxChannel = 2 +asCoilSelectMeas[0].aFFT_SCALE[2].flFactor = 3.7338 +asCoilSelectMeas[0].aFFT_SCALE[2].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[2].lRxChannel = 3 +asCoilSelectMeas[0].aFFT_SCALE[3].flFactor = 4.08449 +asCoilSelectMeas[0].aFFT_SCALE[3].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[3].lRxChannel = 4 +asCoilSelectMeas[0].aFFT_SCALE[4].flFactor = 3.82172 +asCoilSelectMeas[0].aFFT_SCALE[4].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[4].lRxChannel = 5 +asCoilSelectMeas[0].aFFT_SCALE[5].flFactor = 3.86816 +asCoilSelectMeas[0].aFFT_SCALE[5].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[5].lRxChannel = 6 +asCoilSelectMeas[0].aFFT_SCALE[6].flFactor = 4.48252 +asCoilSelectMeas[0].aFFT_SCALE[6].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[6].lRxChannel = 7 +asCoilSelectMeas[0].aFFT_SCALE[7].flFactor = 4.39406 +asCoilSelectMeas[0].aFFT_SCALE[7].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[7].lRxChannel = 8 +asCoilSelectMeas[0].aFFT_SCALE[8].flFactor = 4.50498 +asCoilSelectMeas[0].aFFT_SCALE[8].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[8].lRxChannel = 9 +asCoilSelectMeas[0].aFFT_SCALE[9].flFactor = 4.57011 +asCoilSelectMeas[0].aFFT_SCALE[9].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[9].lRxChannel = 10 +asCoilSelectMeas[0].aFFT_SCALE[10].flFactor = 4.6211 +asCoilSelectMeas[0].aFFT_SCALE[10].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[10].lRxChannel = 11 +asCoilSelectMeas[0].aFFT_SCALE[11].flFactor = 4.69845 +asCoilSelectMeas[0].aFFT_SCALE[11].bValid = 1 +asCoilSelectMeas[0].aFFT_SCALE[11].lRxChannel = 12 +sEFISPEC.bEFIDataValid = 1 +ucCineMode = 0x1 +ucSequenceType = 0x4 +ucCoilCombineMode = 0x2 +ucFlipAngleMode = 0x1 +lTOM = 1 +lProtID = -434 +ucReadOutMode = 0x1 +ucBold3dPace = 0x1 +ucForcePositioningOnNDIS = 0x1 +ucInternalTablePosValid = 0x1 +sParametricMapping.ucParametricMap = 0x1 +sIR.lScanNumber = 1 +sAsl.ulMode = 0x1 +WaitForUserStart = 0x1 +ucAutoAlignInit = 0x1 +### ASCCONV END ### \ No newline at end of file diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py new file mode 100644 index 0000000000..7addb89d51 --- /dev/null +++ b/nibabel/nicom/tests/test_ascconv.py @@ -0,0 +1,40 @@ +""" Testing Siemens "ASCCONV" parser +""" + +from os.path import join as pjoin, dirname + +from .. 
import ascconv +from ...externals import OrderedDict + +from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) +from numpy.testing import assert_array_equal, assert_array_almost_equal + +DATA_PATH = pjoin(dirname(__file__), 'data') +ASCCONV_INPUT = pjoin(DATA_PATH, 'ascconv_sample.txt') + + +def test_ascconv_parse(): + with open(ASCCONV_INPUT, 'rt') as fobj: + contents = fobj.read() + ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', contents) + assert_equal(attrs, OrderedDict()) + assert_equal(len(ascconv_dict), 917) + assert_equal(ascconv_dict['tProtocolName'], 'CBU+AF8-DTI+AF8-64D+AF8-1A') + assert_equal(ascconv_dict['ucScanRegionPosValid'], 1) + assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo.flNominalB0'], + 2.89362) + assert_equal(ascconv_dict['sProtConsistencyInfo.flGMax'], 26) + + +def test_ascconv_w_attrs(): + in_str = ("### ASCCONV BEGIN object=MrProtDataImpl@MrProtocolData " + "version=41340006 " + "converter=%MEASCONST%/ConverterList/Prot_Converter.txt ###\n" + "test = \"hello\"\n" + "### ASCCONV END ###") + ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', in_str) + assert_equal(attrs['object'], 'MrProtDataImpl@MrProtocolData') + assert_equal(attrs['version'], '41340006') + assert_equal(attrs['converter'], + '%MEASCONST%/ConverterList/Prot_Converter.txt') + assert_equal(ascconv_dict['test'], 'hello') From 216780dd81d0872d0183a1fb67e97c0f4ba882ed Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Tue, 24 Mar 2020 13:41:20 -0700 Subject: [PATCH 689/689] TST: Update to use pytest --- nibabel/nicom/tests/test_ascconv.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/nibabel/nicom/tests/test_ascconv.py b/nibabel/nicom/tests/test_ascconv.py index 7addb89d51..acc67da376 100644 --- a/nibabel/nicom/tests/test_ascconv.py +++ b/nibabel/nicom/tests/test_ascconv.py @@ -6,7 +6,6 @@ from .. 
import ascconv from ...externals import OrderedDict -from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) from numpy.testing import assert_array_equal, assert_array_almost_equal DATA_PATH = pjoin(dirname(__file__), 'data') @@ -17,13 +16,13 @@ def test_ascconv_parse(): with open(ASCCONV_INPUT, 'rt') as fobj: contents = fobj.read() ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', contents) - assert_equal(attrs, OrderedDict()) - assert_equal(len(ascconv_dict), 917) - assert_equal(ascconv_dict['tProtocolName'], 'CBU+AF8-DTI+AF8-64D+AF8-1A') - assert_equal(ascconv_dict['ucScanRegionPosValid'], 1) + assert attrs == OrderedDict() + assert len(ascconv_dict) == 917 + assert ascconv_dict['tProtocolName'] == 'CBU+AF8-DTI+AF8-64D+AF8-1A' + assert ascconv_dict['ucScanRegionPosValid'] == 1 assert_array_almost_equal(ascconv_dict['sProtConsistencyInfo.flNominalB0'], 2.89362) - assert_equal(ascconv_dict['sProtConsistencyInfo.flGMax'], 26) + assert ascconv_dict['sProtConsistencyInfo.flGMax'] == 26 def test_ascconv_w_attrs(): @@ -33,8 +32,7 @@ def test_ascconv_w_attrs(): "test = \"hello\"\n" "### ASCCONV END ###") ascconv_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', in_str) - assert_equal(attrs['object'], 'MrProtDataImpl@MrProtocolData') - assert_equal(attrs['version'], '41340006') - assert_equal(attrs['converter'], - '%MEASCONST%/ConverterList/Prot_Converter.txt') - assert_equal(ascconv_dict['test'], 'hello') + assert attrs['object'] == 'MrProtDataImpl@MrProtocolData' + assert attrs['version'] == '41340006' + assert attrs['converter'] == '%MEASCONST%/ConverterList/Prot_Converter.txt' + assert ascconv_dict['test'] == 'hello'
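
A minimal usage sketch (not part of either patch): it assumes only that, with
the first patch applied, the parser is importable as nibabel.nicom.ascconv,
and it feeds parse_ascconv a hand-written ASCCONV block in the
MrPhoenixProtocol dialect, where string values are wrapped in doubled
double-quotes. The field names are illustrative, not taken from a real scan.

    from nibabel.nicom import ascconv

    # Hand-written ASCCONV block; "" ... "" is the MrPhoenixProtocol string
    # delimiter that parse_ascconv normalizes before handing the text to
    # Python's ast parser.
    in_str = ('### ASCCONV BEGIN ###\n'
              'tProtocolName = ""DEMO""\n'
              'ucScanRegionPosValid = 0x1\n'
              'sKSpace.lBaseResolution = 128\n'
              'sDiffusion.alBValue[1] = 1000\n'
              '### ASCCONV END ###')

    prot_dict, attrs = ascconv.parse_ascconv('MrPhoenixProtocol', in_str)
    assert prot_dict['tProtocolName'] == 'DEMO'
    assert prot_dict['ucScanRegionPosValid'] == 1   # hex literal -> int
    assert prot_dict['sKSpace.lBaseResolution'] == 128
    assert prot_dict['sDiffusion.alBValue[1]'] == 1000
    assert attrs == {}   # nothing extra on the BEGIN line

Because the delimiter-normalized block is parsed with Python's own ast module,
any line that is not a simple "name = literal" assignment fails to parse --
the SyntaxError case the docstring warns about.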