@@ -262,6 +262,9 @@ public SpimData2 createDataset( final String xmlFileName )

final DatasetAttributes attr = reader.getDatasetAttributes( dataset + "/" + path );

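// skip entries for which no dataset attributes could be read (e.g., the path is not a dataset)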
if (attr == null) {
continue;
}
IOFunctions.println( "NumDimensions: " + attr.getNumDimensions() );
IOFunctions.println( "Dimensions: " + Arrays.toString( attr.getDimensions() ) );
IOFunctions.println( "BlockSize: " + Arrays.toString( attr.getBlockSize() ) );
@@ -39,6 +39,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import mpicbg.spim.data.sequence.VoxelDimensions;
import org.bigdataviewer.n5.N5CloudImageLoader;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
@@ -188,13 +189,15 @@ else if ( n5Params.format == StorageFormat.HDF5 )
}
else
{
VoxelDimensions vx = data.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize();
// 5d OME-ZARR with dimension=1 in c and t
mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR(
n5Writer,
viewId,
dataTypes.get( viewId.getViewSetupId() ),
dimensions.get( viewId.getViewSetupId() ),
//data.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(),
vx.dimensionsAsDoubleArray(), // resolutionS0
vx.unit(),
Collaborator Author:
In this case it saves a single view setup, so I think it is OK to use the voxel attributes from the view setup.

compression,
blockSize,
downsamplings);
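For context, a hedged sketch of the values passed here, assuming a voxel size of 0.4 x 0.4 x 1.0 um for the single view setup being saved (numbers hypothetical):

// vx.dimensionsAsDoubleArray()  ->  {0.4, 0.4, 1.0} in {x, y, z} order, used as resolutionS0
// vx.unit()                     ->  "um" (see adaptSpatialUnit below for Neuroglancer-safe unit names)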
@@ -23,6 +23,7 @@
package net.preibisch.mvrecon.fiji.spimdata.imgloaders;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.janelia.saalfeldlab.n5.DataType;
@@ -42,25 +43,30 @@ public class AllenOMEZarrProperties
{
private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription;

private final Map< ViewId, OMEZARREntry > viewIdToPath;
// mapping of viewIDs to corresponding OME-ZARRs
private final Map< ViewId, OMEZARREntry > viewIdToOmeZarrPath;

// Cache of OME multiscale metadata per ViewId, so that we can retrieve the appropriate dataset path (e.g., "s0", "s1", "s2" or "0", "1", "2")
private final Map< ViewId, OmeNgffMultiScaleMetadata > viewIdToOmeMetadata = new HashMap<>();

public AllenOMEZarrProperties(
final AbstractSequenceDescription< ?, ?, ? > sequenceDescription,
final Map< ViewId, OMEZARREntry > viewIdToPath )
final Map< ViewId, OMEZARREntry > viewIdToOmeZarrPath)
{
this.sequenceDescription = sequenceDescription;
this.viewIdToPath = viewIdToPath;
this.viewIdToOmeZarrPath = viewIdToOmeZarrPath;
}

private String getPath( final int setupId, final int timepointId )
{
return viewIdToPath.get( new ViewId( timepointId, setupId ) ).getPath();
return viewIdToOmeZarrPath.get( new ViewId( timepointId, setupId ) ).getPath();
}

@Override
public String getDatasetPath( final int setupId, final int timepointId, final int level )
{
return String.format( getPath( setupId, timepointId )+ "/%d", level );
// Note: if the OME metadata has not been cached yet, this method returns the default path, because no reader is available here
return getMultiscaleDatasetPathOrDefault(null, timepointId, setupId, level);
}

@Override
@@ -78,7 +84,7 @@ public double[][] getMipmapResolutions( final N5Reader n5, final int setupId )
@Override
public long[] getDimensions( final N5Reader n5, final int setupId, final int timepointId, final int level )
{
final String path = getDatasetPath( setupId, timepointId, level );
final String path = getMultiscaleDatasetPathOrDefault(n5, timepointId, setupId, level);
final long[] dimensions = n5.getDatasetAttributes( path ).getDimensions();
// the dataset is 5D; remove the channel and time dimensions
return Arrays.copyOf( dimensions, 3 );
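An illustration, assuming N5 reports ZARR dimensions in Fortran order, i.e. {x, y, z, c, t}: a 5D OME-ZARR volume of 2048 x 2048 x 512 with c = t = 1 yields

// n5.getDatasetAttributes( path ).getDimensions()  ->  {2048, 2048, 512, 1, 1}
// Arrays.copyOf( dimensions, 3 )                   ->  {2048, 2048, 512}  (spatial part only)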
@@ -96,40 +102,29 @@ private static int getFirstAvailableTimepointId( final AbstractSequenceDescripti
return tp.getId();
}

throw new RuntimeException( "All timepoints for setupId " + setupId + " are declared missing. Stopping." );
throw new IllegalStateException( "All timepoints for setupId " + setupId + " are declared missing. Stopping." );
}

private static DataType getDataType( final AllenOMEZarrProperties n5properties, final N5Reader n5, final int setupId )
{
final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );
return n5.getDatasetAttributes( n5properties.getDatasetPath( setupId, timePointId, 0 ) ).getDataType();
String datasetPath = n5properties.getMultiscaleDatasetPathOrDefault(n5, timePointId, setupId, 0);
return n5.getDatasetAttributes( datasetPath ).getDataType();
}

private static double[][] getMipMapResolutions( final AllenOMEZarrProperties n5properties, final N5Reader n5, final int setupId )
{
final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );

// multiresolution pyramid
OmeNgffMultiScaleMetadata multiScaleMetadata = n5properties.getViewSetupMultiscaleMetadata(n5, timePointId, setupId);

//org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
// for this to work you need to register an adapter in the N5Factory class
// final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
final OmeNgffMultiScaleMetadata[] multiscales = n5.getAttribute( n5properties.getPath( setupId, timePointId ), "multiscales", OmeNgffMultiScaleMetadata[].class );

if ( multiscales == null || multiscales.length == 0 )
throw new RuntimeException( "Could not parse OME-ZARR multiscales object. stopping." );

if ( multiscales.length != 1 )
System.out.println( "This dataset has " + multiscales.length + " objects, we expected 1. Picking the first one." );

//System.out.println( "AllenOMEZarrLoader.getMipmapResolutions() for " + setupId + " using " + n5properties.getPath( setupId, timePointId ) + ": found " + multiscales[ 0 ].datasets.length + " multi-resolution levels." );

double[][] mipMapResolutions = new double[ multiscales[ 0 ].datasets.length ][ 3 ];
double[][] mipMapResolutions = new double[ multiScaleMetadata.datasets.length ][ 3 ];
double[] firstScale = null;

for ( int i = 0; i < multiscales[ 0 ].datasets.length; ++i )
for ( int i = 0; i < multiScaleMetadata.datasets.length; ++i )
{
final OmeNgffDataset ds = multiscales[ 0 ].datasets[ i ];
final OmeNgffDataset ds = multiScaleMetadata.datasets[ i ];

for ( final CoordinateTransformation< ? > c : ds.coordinateTransformations )
{
@@ -145,11 +140,49 @@ private static double[][] getMipMapResolutions( final AllenOMEZarrProperties n5p
mipMapResolutions[ i ][ d ] = s.getScale()[ d ] / firstScale[ d ];
mipMapResolutions[ i ][ d ] = Math.round(mipMapResolutions[ i ][ d ]*10000)/10000d; // round to 4 decimal places
}
//System.out.println( "AllenOMEZarrLoader.getMipmapResolutions(), level " + i + ": " + Arrays.toString( s.getScale() ) + " >> " + Arrays.toString( mipMapResolutions[ i ] ) );
}
}
}

return mipMapResolutions;
}
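A worked example of the relative resolutions computed above, assuming absolute scales (hypothetical) of {0.4, 0.4, 1.0} at level 0 and {0.8, 0.8, 2.0} at level 1:

// level 0: {0.4/0.4, 0.4/0.4, 1.0/1.0} = {1, 1, 1}
// level 1: {0.8/0.4, 0.8/0.4, 2.0/1.0} = {2, 2, 2}
// the Math.round( x * 10000 ) / 10000d step then removes floating-point noise, e.g. 1.99998 -> 2.0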

private String getMultiscaleDatasetPathOrDefault( N5Reader n5, int timepointId, int setupId, int level )
{
OmeNgffMultiScaleMetadata omeNgffMultiScaleMetadata = getViewSetupMultiscaleMetadata(n5, timepointId, setupId);

String viewSetupPath = getPath( setupId, timepointId );
String datasetPath;

if ( omeNgffMultiScaleMetadata != null ) {
// use the dataset path of the requested level from the metadata
datasetPath = omeNgffMultiScaleMetadata.datasets[level].path;
} else {
// fall back to the default naming scheme: the level index itself
datasetPath = String.valueOf( level );
}

return String.format( "%s/%s", viewSetupPath, datasetPath);
}
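A quick illustration of the resulting dataset paths (all names hypothetical):

// metadata cached, datasets[1].path = "s1"  ->  <viewSetupPath>/s1
// no reader available, nothing cached       ->  <viewSetupPath>/1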

// retrieve and cache the multiscale metadata
private OmeNgffMultiScaleMetadata getViewSetupMultiscaleMetadata(N5Reader n5, int timePointId, int setupId) {
ViewId viewId = new ViewId(timePointId, setupId);

return viewIdToOmeMetadata.computeIfAbsent(viewId, k -> {
if (n5 == null) {
return null; // no mapping will be cached
}

final OmeNgffMultiScaleMetadata[] multiscales = n5.getAttribute( getPath( setupId, timePointId ), "multiscales", OmeNgffMultiScaleMetadata[].class );

if ( multiscales == null || multiscales.length == 0 )
throw new IllegalStateException( "Could not parse OME-ZARR multiscales object. Stopping." );

if ( multiscales.length > 1 )
System.out.println( "This dataset has " + multiscales.length + " multiscales objects, we expected 1. Picking the first one." );

return multiscales[0];
});
}
}
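One detail worth noting in the caching above: Map.computeIfAbsent does not create a mapping when the mapping function returns null, so a lookup made before a reader is available does not poison the cache, and a later call with a non-null reader recomputes. A self-contained sketch of that JDK behavior (class and names illustrative, not part of the PR):

import java.util.HashMap;
import java.util.Map;

public class ComputeIfAbsentNullDemo
{
	public static void main( String[] args )
	{
		final Map< String, String > cache = new HashMap<>();

		// mapping function returns null: nothing is stored
		cache.computeIfAbsent( "key", k -> null );
		System.out.println( cache.containsKey( "key" ) ); // false, so a retry will recompute

		// on retry the value is computed and cached
		cache.computeIfAbsent( "key", k -> "value" );
		System.out.println( cache.get( "key" ) ); // value
	}
}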
@@ -39,9 +39,7 @@
import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.ScaleCoordinateTransformation;
import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.TranslationCoordinateTransformation;

import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.realtransform.AffineTransform3D;
import net.preibisch.mvrecon.process.interestpointregistration.TransformationTools;
import util.URITools;

public class OMEZarrAttibutes
@@ -61,12 +59,6 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(
final Function<Integer, String> levelToName,
final Function<Integer, AffineTransform3D > levelToMipmapTransform )
{
// TODO: make sure the unit is supported by OME-ZARR, if not replace it because otherwise readers will fail
// TODO: e.g. um -> micrometer
// TODO: etc.
// TODO: can you find out what the correct unit for 'unit unknown' is, because that is what I would replace it with, otherwise micrometer
// TOOD: then please also change in TransformationTools.computeCalibration

final OmeNgffMultiScaleMetadata[] meta = new OmeNgffMultiScaleMetadata[ 1 ];

// dataset name and co
@@ -85,9 +77,10 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(
if ( n >= 4 )
axes[ index++ ] = new Axis( "channel", "c", null );

axes[ index++ ] = new Axis( "space", "z", unitXYZ );
axes[ index++ ] = new Axis( "space", "y", unitXYZ );
axes[ index++ ] = new Axis( "space", "x", unitXYZ );
String unit = adaptSpatialUnit( unitXYZ );
axes[ index ] = new Axis( "space", "z", unit );
axes[ index + 1 ] = new Axis( "space", "y", unit );
axes[ index + 2 ] = new Axis( "space", "x", unit );

// multiresolution-pyramid
// TODO: seem to be in XYZCT order (but in the file it seems reversed)
@@ -106,8 +99,8 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(

for ( int d = 0; d < 3; ++d )
{
translation[ d ] = m.getTranslation()[ d ];
scale[ d ] = resolutionS0[ d ] * m.get( d, d );
translation[ d ] = resolutionS0[d] * m.getTranslation()[ d ];
scale[ d ] = resolutionS0[d] * m.get( d, d );
}
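A worked example of the conversion above, assuming resolutionS0 = {0.4, 0.4, 1.0} and a level whose mipmap transform m has diagonal entries 2 and per-axis translation 0.5 (the usual half-pixel offset; all numbers hypothetical):

// scale       = {0.4 * 2,   0.4 * 2,   1.0 * 2  } = {0.8, 0.8, 2.0}  physical units per pixel at this level
// translation = {0.4 * 0.5, 0.4 * 0.5, 1.0 * 0.5} = {0.2, 0.2, 0.5}  s0-pixel offset converted to physical units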

// if 4d and 5d, add 1's for C and T
@@ -137,27 +130,74 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(
return meta;
}


// Note: TransformationTools.computeAverageCalibration does this reasonably correct
/*
public static double[] getResolutionS0( final VoxelDimensions vx, final double anisoF, final double downsamplingF )
public static double[] getResolutionS0( final double[] cal, final double anisoF, final double downsamplingF )
{
final double[] resolutionS0 = vx.dimensionsAsDoubleArray();
double[] resolutionS0 = Arrays.copyOf( cal, cal.length );

// not preserving anisotropy
if ( Double.isNaN( anisoF ) )
resolutionS0[ 2 ] = resolutionS0[ 0 ];
if ( !Double.isNaN( anisoF ) ) {
// preserving anisotropy
resolutionS0[2] = cal[2] * anisoF;
}

// downsampling
if ( !Double.isNaN( downsamplingF ) )
Arrays.setAll( resolutionS0, d -> resolutionS0[ d ] * downsamplingF );

// TODO: this is a hack so the export downsampling pyramid is working
Arrays.setAll( resolutionS0, d -> 1 );

return resolutionS0;
}
*/
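A worked trace of the rewritten method, assuming cal = {0.4, 0.4, 1.0}, anisoF = 2.0 and downsamplingF = 2.0 (numbers hypothetical):

// copy of cal:            {0.4, 0.4, 1.0}
// preserve anisotropy:    {0.4, 0.4, 1.0 * 2.0} = {0.4, 0.4, 2.0}
// apply 2x downsampling:  {0.8, 0.8, 4.0}
// if anisoF and downsamplingF are NaN, the calibration passes through unchanged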

/**
* Adapt various spatial unit spellings to the unit names supported by Neuroglancer.
* The OME-NGFF spec does not restrict units, but Neuroglancer only supports units that end in "meter" plus the US customary units.
* @param unit the input spatial unit (may be null)
* @return a normalized unit name supported by Neuroglancer
*/
private static String adaptSpatialUnit(String unit)
{
if ( unit == null )
return "micrometer";

switch ( unit.toLowerCase() ) {
case "angstrom":
case "ångström":
case "ångströms":
return "angstrom";
case "nm":
case "nanometers":
case "nanometer":
return "nanometer";
case "mm":
case "millimeters":
case "millimeter":
return "millimeter";
case "m":
case "meters":
case "meter":
return "meter";
case "km":
case "kilometer":
case "kilometers":
return "kilometer";
case "inch":
case "inches":
return "inch";
case "foot":
case "feet":
return "foot";
case "yard":
case "yards":
return "yard";
case "mile":
case "miles":
return "mile";
case "um":
case "μm":
case "microns":
case "micron":
default:
return "micrometer";
}
}
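A few expected normalizations, derived from the switch cases as written:

// adaptSpatialUnit( null )     -> "micrometer"  (default for a missing unit)
// adaptSpatialUnit( "um" )     -> "micrometer"
// adaptSpatialUnit( "nm" )     -> "nanometer"
// adaptSpatialUnit( "Feet" )   -> "foot"        (matching is case-insensitive via toLowerCase)
// adaptSpatialUnit( "parsec" ) -> "micrometer"  (unrecognized units fall through to the default)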

public static void loadOMEZarr( final N5Reader n5, final String dataset )
{
@@ -245,23 +245,21 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfoZarr[level].absoluteDownsamplingDouble() );

IOFunctions.println( "Resolution of level 0: " + Util.printCoordinates( cal ) + " " + unit ); //vx.unit() might not be OME-ZARR compatible
double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, anisoF, downsamplingF );

IOFunctions.println( "Calibration: " + Util.printCoordinates( cal ) + " micrometer; resolution at S0: " + Util.printCoordinates( resolutionS0 ) + " " + unit);

// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
5, // int n
"/", // String name, I also saw "/"
cal, // double[] resolutionS0,
resolutionS0, // double[] resolutionS0,
unit, //"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
mrInfoZarr.length, // int numResolutionLevels,
levelToName,
levelToMipmapTransform );

// save metadata

//org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
// for this to work you need to register an adapter in the N5Factory class
// final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
driverVolumeWriter.setAttribute( "/", "multiscales", meta );
}
}
@@ -362,14 +360,16 @@ else if ( storageType == StorageFormat.ZARR ) // OME-Zarr export
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() );

IOFunctions.println( "Resolution of level 0: " + Util.printCoordinates( cal ) + " micrometer" );
double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, anisoF, downsamplingF );

IOFunctions.println( "Calibration: " + Util.printCoordinates( cal ) + " micrometer; resolution at S0: " + Util.printCoordinates( resolutionS0 ) + " " + unit);

// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
3, // int n
omeZarrSubContainer, // String name, I also saw "/"
cal, // double[] resolutionS0,
unit, //"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
resolutionS0, // double[] resolutionS0,
unit, // String unitXYZ, e.g. micrometer; normalized via adaptSpatialUnit
mrInfo.length, // int numResolutionLevels,
(level) -> "/" + level,
levelToMipmapTransform );
@@ -320,13 +320,13 @@ public static Pair< double[], String > computeAverageCalibration(
else if ( unit.equalsIgnoreCase( transformedCal.getB() ) )
unit = transformedCal.getB();
else
unit = "inconsisistent";
unit = "inconsistent";

System.out.println( "Calibration (transformed): " + Util.printCoordinates( transformedCal.getA() ) + " " + transformedCal.getB() );
}

if ( count == 0 )
return new ValuePair<>( new double[] { 1, 1, 1 }, "px" );
return new ValuePair<>( new double[] { 1, 1, 1 }, "micrometer" );
else
return new ValuePair<>( new double[] { avgCalX / (double)count, avgCalY / (double)count, avgCalZ / (double)count }, unit );
}
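A short worked example of the averaging in this hunk: two views with transformed calibrations {0.4, 0.4, 1.0} "um" and {0.6, 0.6, 1.0} "um" (numbers hypothetical) average to {0.5, 0.5, 1.0} "um"; if the views disagree on the unit, the returned unit is "inconsistent"; and with no usable views the fallback is now {1, 1, 1} "micrometer" instead of "px".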