diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8c6a82ef..51d62755 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,10 +25,30 @@ jobs: distribution: 'zulu' - name: Install dependency run: sudo apt-get install libblosc1 + - name: Checkout zarr-java + uses: actions/checkout@v4 + with: + repository: zarr-developers/zarr-java + path: zarr-java + - name: Build and install zarr-java + run: | + cd zarr-java + mvn clean install -DskipTests -Dgpg.skip=true + cd .. - name: Run commands run: | ./gradlew ${{ env.gradle_commands }} - - name: Publish artifacts - if: github.event_name != 'pull_request' && matrix.java == 11 && github.repository_owner == 'glencoesoftware' + cp build/libs/*.jar build/ + - name: Get dependencies + if: matrix.java == 11 run: | - ./gradlew -PArtifactoryUserName=${ArtifactoryUserName} -PArtifactoryPassword=${ArtifactoryPassword} publish + ./gradlew copyDependencies + cp -v build/libs/*.jar libs/ + - name: Upload artifacts + if: matrix.java == 11 + uses: actions/upload-artifact@v4 + with: + name: artifacts + path: | + libs/*.jar + build/*.jar diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index c66044b6..cf48cdb0 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -32,6 +32,19 @@ jobs: - language: java-kotlin build-mode: autobuild steps: + # Build zarr-java first + - name: Install dependency + run: sudo apt-get install libblosc1 + - name: Checkout zarr-java + uses: actions/checkout@v4 + with: + repository: zarr-developers/zarr-java + path: zarr-java + - name: Build and install zarr-java + run: | + cd zarr-java + mvn clean install -DskipTests -Dgpg.skip=true + # Then do codeql stuff - name: Checkout repository uses: actions/checkout@v4 diff --git a/build.gradle b/build.gradle index 02f3a24b..8f25d66b 100644 --- a/build.gradle +++ b/build.gradle @@ -8,10 +8,19 @@ plugins { group = 'com.glencoesoftware.omero' version = '0.6.0-SNAPSHOT' -sourceCompatibility = 1.11 -targetCompatibility = 1.11 +// Java 11 compatibility configuration +java { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 +} + +// Ensure UTF-8 encoding for compilation +tasks.withType(JavaCompile) { + options.encoding = 'UTF-8' +} repositories { + mavenLocal() mavenCentral() maven { name 'Unidata' @@ -33,11 +42,26 @@ repositories { dependencies { api 'org.openmicroscopy:omero-blitz:5.8.3' + implementation 'com.github.ben-manes.caffeine:caffeine:3.1.8' implementation 'dev.zarr:jzarr:0.4.2' - implementation 'org.lasersonlab:s3fs:2.2.3' - implementation 'com.amazonaws:aws-java-sdk-s3:1.12.659' + + implementation 'org.carlspring.cloud.aws:s3fs-nio:3.0.0' + + implementation 'software.amazon.awssdk:s3:2.34.6' implementation 'org.apache.tika:tika-core:1.28.5' + implementation 'net.java.dev.jna:jna:5.10.0' + implementation 'ome:formats-gpl:7.3.1' + implementation 'info.picocli:picocli:4.7.5' + implementation 'com.univocity:univocity-parsers:2.8.4' + implementation 'dev.zarr:zarr-java:0.0.5-SNAPSHOT' + implementation 'javax.xml.bind:jaxb-api:2.3.0' + implementation group: 'ch.qos.logback', name: 'logback-classic', version: '1.3.14' + implementation group: 'ch.qos.logback', name: 'logback-core', version: '1.3.14' + implementation 'org.apache.maven:maven-artifact:3.9.4' + + implementation 'com.fasterxml.jackson.core:jackson-core:2.20.0' + implementation 'com.fasterxml.jackson.core:jackson-databind:2.20.0' testImplementation 'info.picocli:picocli:4.7.5' testImplementation 
'com.glencoesoftware:bioformats2raw:0.11.0' @@ -92,3 +116,9 @@ jar { checkstyle { toolVersion = "10.26.1" } + +// Task to copy all runtime dependencies to libs directory +tasks.register('copyDependencies', Copy) { + from configurations.runtimeClasspath + into 'libs' +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java index f078056b..9af2dfa7 100644 --- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java +++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java @@ -18,65 +18,63 @@ package com.glencoesoftware.omero.zarr; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSCredentialsProviderChain; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.AnonymousAWSCredentials; -import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; -import com.amazonaws.auth.profile.ProfileCredentialsProvider; -import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; -import com.amazonaws.regions.Regions; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.util.AwsHostNameUtils; -import com.upplication.s3fs.AmazonS3ClientFactory; import java.net.URI; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.Properties; +import org.carlspring.cloud.storage.s3fs.S3ClientFactory; import org.slf4j.LoggerFactory; +import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProviderChain; +import software.amazon.awssdk.auth.credentials.InstanceProfileCredentialsProvider; +import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider; +import software.amazon.awssdk.awscore.util.AwsHostNameUtils; +import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.S3Client; /** * Subclass which maps an URI into a set of credentials to use for the client. 
*/ -public class OmeroAmazonS3ClientFactory extends AmazonS3ClientFactory { +public class OmeroAmazonS3ClientFactory extends S3ClientFactory { - private static final org.slf4j.Logger log = - LoggerFactory.getLogger(OmeroAmazonS3ClientFactory.class); + private static final org.slf4j.Logger log = LoggerFactory + .getLogger(OmeroAmazonS3ClientFactory.class); - private static final Map bucketClientMap = new HashMap<>(); + private static final Map bucketClientMap = new HashMap<>(); @Override - protected AWSCredentialsProvider getCredentialsProvider(Properties props) { + protected AwsCredentialsProvider getCredentialsProvider(Properties props) { // If AWS Environment or System Properties are set, throw an exception // so users will know they are not supported if (System.getenv("AWS_ACCESS_KEY_ID") != null - || System.getenv("AWS_SECRET_ACCESS_KEY") != null - || System.getenv("AWS_SESSION_TOKEN") != null - || System.getProperty("aws.accessKeyId") != null - || System.getProperty("aws.secretAccessKey") != null) { + || System.getenv("AWS_SECRET_ACCESS_KEY") != null + || System.getenv("AWS_SESSION_TOKEN") != null + || System.getProperty("aws.accessKeyId") != null + || System.getProperty("aws.secretAccessKey") != null) { throw new RuntimeException("AWS credentials supplied by environment variables" - + " or Java system properties are not supported." - + " Please use either named profiles or instance" - + " profile credentials."); + + " or Java system properties are not supported." + + " Please use either named profiles or instance" + " profile credentials."); } - boolean anonymous = Boolean.parseBoolean( - (String) props.get("s3fs_anonymous")); + boolean anonymous = Boolean.parseBoolean((String) props.get("s3fs_anonymous")); if (anonymous) { log.debug("Using anonymous credentials"); - return new AWSStaticCredentialsProvider( - new AnonymousAWSCredentials()); + return AnonymousCredentialsProvider.create(); } else { - String profileName = - (String) props.get("s3fs_credential_profile_name"); + String profileName = (String) props.get("s3fs_credential_profile_name"); // Same instances and order from DefaultAWSCredentialsProviderChain - return new AWSCredentialsProviderChain( - new ProfileCredentialsProvider(profileName), - new EC2ContainerCredentialsProviderWrapper() - ); + ProfileCredentialsProvider.Builder profileBuilder = ProfileCredentialsProvider + .builder(); + if (profileName != null) { + profileBuilder.profileName(profileName); + } + return AwsCredentialsProviderChain.of(profileBuilder.build(), + InstanceProfileCredentialsProvider.create()); } } - + /** * Retrieves the bucket name from a given URI. 
* @@ -97,12 +95,12 @@ private String getBucketFromUri(URI uri) { * @param uri The URI to handle * @return The region */ - private String getRegionFromUri(URI uri) { - String region = AwsHostNameUtils.parseRegion(uri.getHost(), null); - if (region != null) { - return region; + private Region getRegionFromUri(URI uri) { + Optional region = AwsHostNameUtils.parseSigningRegion(uri.getHost(), null); + if (region.isPresent()) { + return region.get(); } - return Regions.DEFAULT_REGION.getName(); + return Region.US_EAST_1; } /** @@ -116,7 +114,7 @@ public String getEndPointFromUri(URI uri) { } @Override - public synchronized AmazonS3 getAmazonS3(URI uri, Properties props) { + public synchronized S3Client getS3Client(URI uri, Properties props) { // Check if we have a S3 client for this bucket String bucket = getBucketFromUri(uri); if (bucketClientMap.containsKey(bucket)) { @@ -124,13 +122,10 @@ public synchronized AmazonS3 getAmazonS3(URI uri, Properties props) { return bucketClientMap.get(bucket); } log.info("Creating client for bucket " + bucket); - AmazonS3 client = AmazonS3ClientBuilder.standard() - .withCredentials(getCredentialsProvider(props)) - .withClientConfiguration(getClientConfiguration(props)) - .withMetricsCollector(getRequestMetricsCollector(props)) - .withEndpointConfiguration( - new EndpointConfiguration(getEndPointFromUri(uri), getRegionFromUri(uri))) - .build(); + ClientOverrideConfiguration cconf = getOverrideConfiguration(props); + S3Client client = S3Client.builder().credentialsProvider(getCredentialsProvider(props)) + .region(getRegionFromUri(uri)).endpointOverride(URI.create(getEndPointFromUri(uri))) + .overrideConfiguration(cconf).build(); bucketClientMap.put(bucket, client); return client; } diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java index 2af84c49..0c126fc8 100644 --- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java +++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java @@ -18,17 +18,17 @@ package com.glencoesoftware.omero.zarr; -import com.amazonaws.services.s3.AmazonS3; -import com.upplication.s3fs.S3FileSystem; -import com.upplication.s3fs.S3FileSystemProvider; import java.io.IOException; +import org.carlspring.cloud.storage.s3fs.S3FileSystem; +import org.carlspring.cloud.storage.s3fs.S3FileSystemProvider; +import software.amazon.awssdk.services.s3.S3Client; /** Subclass of S3FileSystem with performance optimizations. */ public class OmeroS3FileSystem extends S3FileSystem { /** Default constructor. 
*/ - public OmeroS3FileSystem(S3FileSystemProvider provider, String key, - AmazonS3 client, String endpoint) { + public OmeroS3FileSystem(S3FileSystemProvider provider, String key, S3Client client, + String endpoint) { super(provider, key, client, endpoint); } diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java index 8fd8e1c3..9fefe1c8 100644 --- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java +++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java @@ -18,11 +18,6 @@ package com.glencoesoftware.omero.zarr; -import com.amazonaws.services.s3.model.GetObjectRequest; -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectInputStream; -import com.upplication.s3fs.S3Path; -import com.upplication.s3fs.S3ReadOnlySeekableByteChannel; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -37,14 +32,16 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; +import org.carlspring.cloud.storage.s3fs.S3Path; import org.perf4j.slf4j.Slf4JStopWatch; - +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; /** - * Overridden, hybrid version of the implementation from - * {@link S3ReadOnlySeekableByteChannel}. Due to its private visibility on - * nearly all the important instance variables much of the implementation is - * copied verbatim. + * Overridden, hybrid version of the implementation from {@link S3ReadOnlySeekableByteChannel}. Due + * to its private visibility on nearly all the important instance variables much of the + * implementation is copied verbatim. */ public class OmeroS3ReadOnlySeekableByteChannel implements SeekableByteChannel { @@ -55,45 +52,39 @@ public class OmeroS3ReadOnlySeekableByteChannel implements SeekableByteChannel { private long position = 0; /** - * Overridden, hybrid version of the implementation from - * {@link S3ReadOnlySeekableByteChannel}. Our implementation loads the - * entire object in full from S3 without checks for length during + * Overridden, hybrid version of the implementation from {@link S3ReadOnlySeekableByteChannel}. + * Our implementation loads the entire object in full from S3 without checks for length during * object construction. 
*/ public OmeroS3ReadOnlySeekableByteChannel(S3Path path, Set options) - throws IOException { + throws IOException { this.options = Collections.unmodifiableSet(new HashSet<>(options)); if (this.options.contains(StandardOpenOption.WRITE) - || this.options.contains(StandardOpenOption.CREATE) - || this.options.contains(StandardOpenOption.CREATE_NEW) - || this.options.contains(StandardOpenOption.APPEND) - ) { + || this.options.contains(StandardOpenOption.CREATE) + || this.options.contains(StandardOpenOption.CREATE_NEW) + || this.options.contains(StandardOpenOption.APPEND)) { throw new ReadOnlyFileSystemException(); } String bucketName = path.getFileStore().name(); String key = path.getKey(); - GetObjectRequest getObjectRequest = - new GetObjectRequest(bucketName, key); + GetObjectRequest getObjectRequest = GetObjectRequest.builder().bucket(bucketName).key(key) + .build(); - Slf4JStopWatch t0 = new Slf4JStopWatch( - "OmeroS3ReadOnlySeekableByteChannel.getObject", - "s3://" + bucketName + "/" + key); + Slf4JStopWatch t0 = new Slf4JStopWatch("OmeroS3ReadOnlySeekableByteChannel.getObject", + "s3://" + bucketName + "/" + key); try { - S3Object s3Object = - path - .getFileSystem() - .getClient() - .getObject(getObjectRequest); + ResponseInputStream s3Object = path.getFileSystem().getClient() + .getObject(getObjectRequest); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); // The return value of getObjectContent should be copied and // the stream closed as quickly as possible. See // https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/model/S3Object.html#getObjectContent-- - try (S3ObjectInputStream s3Stream = s3Object.getObjectContent()) { + try (s3Object) { byte[] readBuf = new byte[1024 * 1024]; int readLen = 0; - while ((readLen = s3Stream.read(readBuf)) > 0) { + while ((readLen = s3Object.read(readBuf)) > 0) { outputStream.write(readBuf, 0, readLen); } } @@ -108,9 +99,8 @@ public OmeroS3ReadOnlySeekableByteChannel(S3Path path, Set } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public boolean isOpen() { @@ -118,9 +108,8 @@ public boolean isOpen() { } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public long position() { @@ -128,20 +117,17 @@ public long position() { } /** - * Overridden, hybrid version of the implementation from - * {@link S3ReadOnlySeekableByteChannel}. Our implementation does not - * support repositioning within the channel. + * Overridden, hybrid version of the implementation from {@link S3ReadOnlySeekableByteChannel}. + * Our implementation does not support repositioning within the channel. */ @Override - public SeekableByteChannel position(long targetPosition) - throws IOException { + public SeekableByteChannel position(long targetPosition) throws IOException { throw new UnsupportedOperationException(); } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. 
+ * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public int read(ByteBuffer dst) throws IOException { @@ -153,9 +139,8 @@ public int read(ByteBuffer dst) throws IOException { } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public SeekableByteChannel truncate(long size) throws IOException { @@ -163,9 +148,8 @@ public SeekableByteChannel truncate(long size) throws IOException { } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public int write(ByteBuffer src) throws IOException { @@ -173,9 +157,8 @@ public int write(ByteBuffer src) throws IOException { } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public long size() throws IOException { @@ -183,9 +166,8 @@ public long size() throws IOException { } /** - * An exact copy of the implementation from - * {@link S3ReadOnlySeekableByteChannel} due to its instance variable - * private visibility. + * An exact copy of the implementation from {@link S3ReadOnlySeekableByteChannel} due to its + * instance variable private visibility. */ @Override public void close() throws IOException { diff --git a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java index 4cd2bee7..b05b2e33 100644 --- a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java +++ b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java @@ -18,16 +18,15 @@ package com.glencoesoftware.omero.zarr; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; import com.github.benmanes.caffeine.cache.AsyncLoadingCache; import com.github.benmanes.caffeine.cache.Caffeine; +import com.glencoesoftware.omero.zarr.compat.ZArray; +import com.glencoesoftware.omero.zarr.compat.ZarrPath; import java.awt.Dimension; import java.io.IOException; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -56,7 +55,7 @@ public class ZarrPixelBuffer implements PixelBuffer { private final Pixels pixels; /** Root of the OME-NGFF multiscale we are operating on. */ - private final Path root; + private final ZarrPath root; /** Requested resolution level. */ private int resolutionLevel; @@ -74,7 +73,7 @@ public class ZarrPixelBuffer implements PixelBuffer { private final Map rootGroupAttributes; /** Zarr array corresponding to the current resolution level. */ - private ZarrArray array; + private ZArray array; /** * Mapping of Z plane indexes in full resolution to @@ -90,10 +89,10 @@ public class ZarrPixelBuffer implements PixelBuffer { /** Root path vs. metadata cache. 
*/ private final - AsyncLoadingCache> zarrMetadataCache; + AsyncLoadingCache> zarrMetadataCache; - /** Array path vs. ZarrArray cache. */ - private final AsyncLoadingCache zarrArrayCache; + /** Array path vs. ZArray cache */ + private final AsyncLoadingCache zarrArrayCache; /** Supported axes, X and Y are essential. */ public enum Axis { @@ -109,10 +108,10 @@ public enum Axis { * @param pixels Pixels metadata for the pixel buffer * @param root The root of this buffer */ - public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth, + public ZarrPixelBuffer(Pixels pixels, ZarrPath root, Integer maxPlaneWidth, Integer maxPlaneHeight, - AsyncLoadingCache> zarrMetadataCache, - AsyncLoadingCache zarrArrayCache) + AsyncLoadingCache> zarrMetadataCache, + AsyncLoadingCache zarrArrayCache) throws IOException { log.info("Creating ZarrPixelBuffer"); this.pixels = pixels; @@ -121,7 +120,13 @@ public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth, this.zarrArrayCache = zarrArrayCache; this.isRemote = root.toString().startsWith("s3://") ? true : false; try { - rootGroupAttributes = this.zarrMetadataCache.get(this.root).get(); + Map tmp = this.zarrMetadataCache.get(this.root).get(); + if (tmp.containsKey("ome")) { + // for ngff challenge data attr are often nested within "ome" key + rootGroupAttributes = (Map) tmp.get("ome"); + } else { + rootGroupAttributes = tmp; + } } catch (ExecutionException | InterruptedException e) { throw new IOException(e); } @@ -158,36 +163,6 @@ public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth, }); } - /** - * Get Bio-Formats/OMERO pixels type for buffer. - * - * @return See above. - */ - public int getPixelsType() { - DataType dataType = array.getDataType(); - switch (dataType) { - case u1: - return FormatTools.UINT8; - case i1: - return FormatTools.INT8; - case u2: - return FormatTools.UINT16; - case i2: - return FormatTools.INT16; - case u4: - return FormatTools.UINT32; - case i4: - return FormatTools.INT32; - case f4: - return FormatTools.FLOAT; - case f8: - return FormatTools.DOUBLE; - default: - throw new IllegalArgumentException( - "Data type " + dataType + " not supported"); - } - } - /** * Calculates the pixel length of a given NumPy like "shape". 
* @@ -223,43 +198,43 @@ private void read(byte[] buffer, int[] shape, int[] offset) } try { ByteBuffer asByteBuffer = ByteBuffer.wrap(buffer); - DataType dataType = array.getDataType(); + int dataType = array.getPixelsType(); for (int z = 0; z < planes; z++) { if (axesOrder.containsKey(Axis.Z)) { offset[axesOrder.get(Axis.Z)] = zIndexMap.get(originalZIndex + z); } switch (dataType) { - case u1: - case i1: + case FormatTools.UINT8: + case FormatTools.INT8: array.read(buffer, shape, offset); break; - case u2: - case i2: + case FormatTools.UINT16: + case FormatTools.INT16: { short[] data = (short[]) array.read(shape, offset); asByteBuffer.asShortBuffer().put(data); break; } - case u4: - case i4: + case FormatTools.UINT32: + case FormatTools.INT32: { int[] data = (int[]) array.read(shape, offset); asByteBuffer.asIntBuffer().put(data); break; } - case i8: - { - long[] data = (long[]) array.read(shape, offset); - asByteBuffer.asLongBuffer().put(data); - break; - } - case f4: + // case FormatTools.INT64: + // { + // long[] data = (long[]) array.read(shape, offset); + // asByteBuffer.asLongBuffer().put(data); + // break; + // } + case FormatTools.FLOAT: { float[] data = (float[]) array.read(shape, offset); asByteBuffer.asFloatBuffer().put(data); break; } - case f8: + case FormatTools.DOUBLE: { double[] data = (double[]) array.read(shape, offset); asByteBuffer.asDoubleBuffer().put(data); @@ -284,7 +259,7 @@ private PixelData toPixelData(byte[] buffer) { return null; } PixelData d = new PixelData( - FormatTools.getPixelTypeString(getPixelsType()), + FormatTools.getPixelTypeString(array.getPixelsType()), ByteBuffer.wrap(buffer)); d.setOrder(ByteOrder.BIG_ENDIAN); return d; @@ -300,8 +275,8 @@ public int[][] getChunks() throws IOException { List> datasets = getDatasets(); List chunks = new ArrayList(); for (Map dataset : datasets) { - ZarrArray resolutionArray = ZarrArray.open( - root.resolve(dataset.get("path"))); + ZarrPath dsPath = root.resolve(dataset.get("path")); + ZArray resolutionArray = dsPath.getArray(); int[] shape = resolutionArray.getChunks(); chunks.add(shape); } @@ -843,17 +818,17 @@ public byte[] calculateMessageDigest() throws IOException { @Override public int getByteWidth() { - return FormatTools.getBytesPerPixel(getPixelsType()); + return FormatTools.getBytesPerPixel(array.getPixelsType()); } @Override public boolean isSigned() { - return FormatTools.isSigned(getPixelsType()); + return FormatTools.isSigned(array.getPixelsType()); } @Override public boolean isFloat() { - return FormatTools.isFloatingPoint(getPixelsType()); + return FormatTools.isFloatingPoint(array.getPixelsType()); } @Override @@ -925,6 +900,10 @@ public int getResolutionLevel() { resolutionLevel - (resolutionLevels - 1)); } + public int getPixelsType() { + return array.getPixelsType(); + } + @Override public void setResolutionLevel(int resolutionLevel) { if (resolutionLevel >= resolutionLevels) { @@ -945,10 +924,9 @@ public void setResolutionLevel(int resolutionLevel) { zIndexMap.clear(); } try { - array = zarrArrayCache.get( - root.resolve(Integer.toString(this.resolutionLevel))).get(); - - ZarrArray fullResolutionArray = zarrArrayCache.get( + ZarrPath p = root.resolve(Integer.toString(this.resolutionLevel)); + array = zarrArrayCache.get(p).get(); + ZArray fullResolutionArray = zarrArrayCache.get( root.resolve("0")).get(); if (axesOrder.containsKey(Axis.Z)) { diff --git a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelsService.java b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelsService.java index 
d34e0c16..417f9995 100644 --- a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelsService.java +++ b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelsService.java @@ -18,22 +18,14 @@ package com.glencoesoftware.omero.zarr; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; import com.github.benmanes.caffeine.cache.AsyncLoadingCache; import com.github.benmanes.caffeine.cache.Caffeine; -import com.google.common.base.Splitter; -import com.upplication.s3fs.OmeroS3FilesystemProvider; +import com.glencoesoftware.omero.zarr.compat.ZArray; +import com.glencoesoftware.omero.zarr.compat.ZarrInfo; +import com.glencoesoftware.omero.zarr.compat.ZarrPath; import java.io.File; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.FileSystem; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; import java.util.Map; -import java.util.Optional; import ome.api.IQuery; import ome.conditions.LockTimeout; import ome.io.nio.BackOff; @@ -78,10 +70,10 @@ public class ZarrPixelsService extends ome.io.nio.PixelsService { /** Root path vs. metadata cache. */ private final - AsyncLoadingCache> zarrMetadataCache; + AsyncLoadingCache> zarrMetadataCache; /** Array path vs. ZarrArray cache */ - private final AsyncLoadingCache zarrArrayCache; + private final AsyncLoadingCache zarrArrayCache; /** Default constructor. */ public ZarrPixelsService( @@ -113,12 +105,12 @@ public ZarrPixelsService( * @param path path to get Zarr metadata from * @return See above. */ - public static Map getZarrMetadata(Path path) + public static Map getZarrMetadata(ZarrPath path) throws IOException { // FIXME: Really should be ZarrUtils.readAttributes() to allow for // attribute retrieval from either a ZarrArray or ZarrGroup but ZarrPath // is package private at the moment. - return ZarrGroup.open(path).getAttributes(); + return path.getMetadata(); } /** @@ -127,63 +119,8 @@ public static Map getZarrMetadata(Path path) * @param path path to open a Zarr array from * @return See above. */ - public static ZarrArray getZarrArray(Path path) throws IOException { - return ZarrArray.open(path); - } - - /** - * Converts an NGFF root string to a path, initializing a {@link FileSystem} - * if required. - * - * @param ngffDir NGFF directory root - * @return Fully initialized path or null if the NGFF root - * directory has not been specified in configuration. - */ - public static Path asPath(String ngffDir) throws IOException { - if (ngffDir.isEmpty()) { - return null; - } - - try { - URI uri = new URI(ngffDir); - if ("s3".equals(uri.getScheme())) { - if (uri.getUserInfo() != null && !uri.getUserInfo().isEmpty()) { - throw new RuntimeException( - "Found unsupported user information in S3 URI." - + " If you are trying to pass S3 credentials, " - + "use either named profiles or instance credentials."); - } - String query = Optional.ofNullable(uri.getQuery()).orElse(""); - Map params = Splitter.on('&') - .trimResults() - .omitEmptyStrings() - .withKeyValueSeparator('=') - .split(query); - // drop initial "/" - String uriPath = uri.getPath().substring(1); - int first = uriPath.indexOf("/"); - String bucket = "/" + uriPath.substring(0, first); - String rest = uriPath.substring(first + 1); - // FIXME: We might want to support additional S3FS settings in - // the future. 
See: - // * https://github.com/lasersonlab/Amazon-S3-FileSystem-NIO2 - Map env = new HashMap(); - String profile = params.get("profile"); - if (profile != null) { - env.put("s3fs_credential_profile_name", profile); - } - String anonymous = - Optional.ofNullable(params.get("anonymous")) - .orElse("false"); - env.put("s3fs_anonymous", anonymous); - OmeroS3FilesystemProvider fsp = new OmeroS3FilesystemProvider(); - FileSystem fs = fsp.newFileSystem(uri, env); - return fs.getPath(bucket, rest); - } - } catch (URISyntaxException e) { - // Fall through - } - return Paths.get(ngffDir); + public static ZArray getZarrArray(ZarrPath path) throws IOException { + return path.getArray(); } /** @@ -308,8 +245,10 @@ public ZarrPixelBuffer getLabelImagePixelBuffer(Mask mask) throw new IllegalArgumentException( "No root for Mask:" + mask.getId()); } + ZarrInfo zarrInfo = new ZarrInfo(root); + ZarrPath zarrPath = zarrInfo.getZarrPath(); return new ZarrPixelBuffer( - pixels, asPath(root), maxPlaneWidth, maxPlaneHeight, + pixels, zarrPath, maxPlaneWidth, maxPlaneHeight, zarrMetadataCache, zarrArrayCache); } @@ -335,11 +274,11 @@ protected ZarrPixelBuffer createOmeNgffPixelBuffer(Pixels pixels) { log.debug("No OME-NGFF root"); return null; } - Path root = asPath(uri); - log.info("OME-NGFF root is: " + uri); + ZarrInfo zarrInfo = new ZarrInfo(uri); + log.info("OME-NGFF root is: " + zarrInfo); try { ZarrPixelBuffer v = new ZarrPixelBuffer( - pixels, root, maxPlaneWidth, maxPlaneHeight, + pixels, zarrInfo.getZarrPath(), maxPlaneWidth, maxPlaneHeight, zarrMetadataCache, zarrArrayCache); log.info("Using OME-NGFF pixel buffer"); return v; @@ -348,7 +287,7 @@ protected ZarrPixelBuffer createOmeNgffPixelBuffer(Pixels pixels) { "Getting OME-NGFF pixel buffer failed - " + "attempting to get local data", e); } - } catch (IOException e1) { + } catch (Exception e1) { log.debug( "Failed to find OME-NGFF metadata for Pixels:{}", pixels.getId()); diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArray.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArray.java new file mode 100644 index 00000000..9e8cc2f5 --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArray.java @@ -0,0 +1,56 @@ +package com.glencoesoftware.omero.zarr.compat; + +import java.io.IOException; +import ucar.ma2.InvalidRangeException; + +/** + * Interface for representing a Zarr array with methods for accessing array properties and reading + * data from the array. + */ +public interface ZArray { + + /** + * Gets the shape (dimensions) of the array. + * + * @return an array of integers representing the size of each dimension + */ + public int[] getShape(); + + /** + * Gets the chunk size. + * + * @return an array of integers representing the chunk size. + */ + public int[] getChunks(); + + /** + * Reads data from the array into a provided byte buffer. + * + * @param buffer the byte buffer to read data into + * @param shape the shape of the data to read + * @param offset the offset position to start reading from + * @throws IOException if an I/O error occurs during reading + * @throws InvalidRangeException if the specified range is invalid + */ + public void read(byte[] buffer, int[] shape, int[] offset) + throws IOException, InvalidRangeException; + + /** + * Reads data from the array and returns it as an Object (short[], int[], etc.). 
+ * + * @param shape the shape of the data to read + * @param offset the offset position to start reading from + * @return the data read from the array as an Object + * @throws IOException if an I/O error occurs during reading + * @throws InvalidRangeException if the specified range is invalid + */ + public Object read(int[] shape, int[] offset) throws IOException, InvalidRangeException; + + /** + * Gets the pixel type identifier for this array. + * + * @return an integer representing the pixel type + */ + public int getPixelsType(); + +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv2.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv2.java new file mode 100644 index 00000000..1dae765e --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv2.java @@ -0,0 +1,67 @@ +package com.glencoesoftware.omero.zarr.compat; + +import com.bc.zarr.DataType; +import com.bc.zarr.ZarrArray; +import java.io.IOException; +import loci.formats.FormatTools; +import ucar.ma2.InvalidRangeException; + +class ZArrayv2 implements ZArray { + + private ZarrArray array; + + public ZArrayv2(ZarrArray array) { + this.array = array; + } + + @Override + public int[] getShape() { + return array.getShape(); + } + + @Override + public int[] getChunks() { + return array.getChunks(); + } + + @Override + public void read(byte[] buffer, int[] shape, int[] offset) + throws IOException, InvalidRangeException { + array.read(buffer, shape, offset); + } + + @Override + public Object read(int[] shape, int[] offset) throws IOException, InvalidRangeException { + return array.read(shape, offset); + } + + /** + * Get Bio-Formats/OMERO pixels type for buffer. + * + * @return See above. + */ + @Override + public int getPixelsType() { + DataType dataType = array.getDataType(); + switch (dataType) { + case u1: + return FormatTools.UINT8; + case i1: + return FormatTools.INT8; + case u2: + return FormatTools.UINT16; + case i2: + return FormatTools.INT16; + case u4: + return FormatTools.UINT32; + case i4: + return FormatTools.INT32; + case f4: + return FormatTools.FLOAT; + case f8: + return FormatTools.DOUBLE; + default: + throw new IllegalArgumentException("Data type " + dataType + " not supported"); + } + } +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv3.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv3.java new file mode 100644 index 00000000..2dc52a38 --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZArrayv3.java @@ -0,0 +1,89 @@ +package com.glencoesoftware.omero.zarr.compat; + +import dev.zarr.zarrjava.ZarrException; +import dev.zarr.zarrjava.v3.Array; +import dev.zarr.zarrjava.v3.DataType; +import java.io.IOException; +import java.nio.ByteBuffer; +import loci.formats.FormatTools; +import ucar.ma2.InvalidRangeException; + +class ZArrayv3 implements ZArray { + + private Array array; + + public ZArrayv3(Array array) { + this.array = array; + } + + @Override + public int[] getShape() { + int[] shape = new int[this.array.metadata.shape.length]; + for (int i = 0; i < this.array.metadata.shape.length; i++) { + shape[i] = (int) this.array.metadata.shape[i]; + } + return shape; + } + + @Override + public int[] getChunks() { + int[] chunks = new int[this.array.metadata.chunkShape().length]; + for (int i = 0; i < this.array.metadata.chunkShape().length; i++) { + chunks[i] = (int) this.array.metadata.chunkShape()[i]; + } + return chunks; + } + + @Override + public void read(byte[] buffer, int[] shape, int[] offset) + 
            throws IOException, InvalidRangeException {
+        try {
+            long[] offsetLong = new long[offset.length];
+            for (int i = 0; i < offset.length; i++) {
+                offsetLong[i] = offset[i];
+            }
+            ByteBuffer b = array.read(offsetLong, shape).getDataAsByteBuffer();
+            System.arraycopy(b.array(), 0, buffer, 0, buffer.length);
+        } catch (ZarrException e) {
+            throw new IOException(e);
+        }
+    }
+
+    @Override
+    public Object read(int[] shape, int[] offset) throws IOException, InvalidRangeException {
+        try {
+            long[] offsetLong = new long[offset.length];
+            for (int i = 0; i < offset.length; i++) {
+                offsetLong[i] = offset[i];
+            }
+            return array.read(offsetLong, shape).copyTo1DJavaArray();
+        } catch (ZarrException e) {
+            throw new IOException(e);
+        }
+    }
+
+    @Override
+    public int getPixelsType() {
+        DataType dataType = array.metadata.dataType;
+        switch (dataType) {
+            case UINT8:
+                return FormatTools.UINT8;
+            case INT8:
+                return FormatTools.INT8;
+            case UINT16:
+                return FormatTools.UINT16;
+            case INT16:
+                return FormatTools.INT16;
+            case UINT32:
+                return FormatTools.UINT32;
+            case INT32:
+                return FormatTools.INT32;
+            case FLOAT32:
+                return FormatTools.FLOAT;
+            case FLOAT64:
+                return FormatTools.DOUBLE;
+            default:
+                throw new IllegalArgumentException("Data type " + dataType + " not supported");
+        }
+    }
+}
diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrInfo.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrInfo.java
new file mode 100644
index 00000000..31f18aac
--- /dev/null
+++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrInfo.java
@@ -0,0 +1,297 @@
+package com.glencoesoftware.omero.zarr.compat;
+
+import com.bc.zarr.ZarrGroup;
+import com.google.common.base.Splitter;
+import dev.zarr.zarrjava.store.FilesystemStore;
+import dev.zarr.zarrjava.store.HttpStore;
+import dev.zarr.zarrjava.store.S3Store;
+import dev.zarr.zarrjava.store.StoreHandle;
+import dev.zarr.zarrjava.v3.Group;
+import dev.zarr.zarrjava.v3.GroupMetadata;
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.FileSystem;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.maven.artifact.versioning.ComparableVersion;
+import org.carlspring.cloud.storage.s3fs.OmeroS3FilesystemProvider;
+import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.services.s3.S3Client;
+
+/**
+ * Determines properties of a Zarr location: whether it is remote or local, and which Zarr and
+ * NGFF versions it uses. Use getZarrPath() to access the Zarr metadata and arrays.
+ */
+public class ZarrInfo {
+    private static final org.slf4j.Logger log = LoggerFactory.getLogger(ZarrInfo.class);
+
+    public static final ComparableVersion ZARR_V2 = new ComparableVersion("2");
+    public static final ComparableVersion ZARR_V3 = new ComparableVersion("3");
+    public static final ComparableVersion NGFF_V0_4 = new ComparableVersion("0.4");
+    public static final ComparableVersion NGFF_V0_5 = new ComparableVersion("0.5");
+
+    private ComparableVersion zarrVersion;
+
+    private ComparableVersion ngffVersion;
+
+    private String location;
+
+    /**
+     * Enum representing different storage types for Zarr data.
+     */
+    public enum StorageType {
+        FILE, S3, HTTP
+    }
+
+    private StorageType storageType;
+
+    /**
+     * Create a new ZarrInfo.
+     *
+     * @param location the location of the zarr
+     * @throws IOException if the zarr can't be read
+     */
+    public ZarrInfo(String location) throws IOException {
+        this.location = location.endsWith("/") ? location.substring(0, location.length() - 1)
+                : location;
+        checkProperties();
+    }
+
+    /**
+     * Determines whether the Zarr location is remote or local and detects its Zarr and NGFF
+     * versions.
+     *
+     * @throws IOException If the zarr can't be read
+     */
+    private void checkProperties() throws IOException {
+        URI uri;
+        try {
+            uri = new URI(location);
+        } catch (URISyntaxException e) {
+            throw new IOException("Invalid URI: " + location, e);
+        }
+
+        if (uri.getScheme() == null || "file".equals(uri.getScheme().toLowerCase())) {
+            File test = new File(location);
+            if (!test.isDirectory()) {
+                throw new IOException("Not a directory: " + location);
+            }
+            if (!test.canRead()) {
+                throw new IOException("Cannot read directory: " + location);
+            }
+            storageType = StorageType.FILE;
+        } else {
+            String scheme = uri.getScheme().toLowerCase();
+            if (scheme.startsWith("http")) {
+                storageType = StorageType.HTTP;
+            } else if (scheme.equals("s3")) {
+                storageType = StorageType.S3;
+            } else {
+                throw new IOException("Unsupported scheme: " + scheme);
+            }
+        }
+
+        // checking for zarr v3
+        try {
+            StoreHandle sh = asStoreHandle();
+            GroupMetadata md = Group.open(sh).metadata;
+            zarrVersion = new ComparableVersion("3"); // if that works it should be v3
+            try {
+                ngffVersion = new ComparableVersion(
+                        ((Map) md.attributes.get("ome")).get("version").toString());
+            } catch (Exception e) {
+                log.debug("Failed to get ngff version from zarr, set to 0.5");
+                ngffVersion = new ComparableVersion("0.5");
+            }
+            return;
+        } catch (Exception e) {
+            log.debug("Not zarr v3:", e);
+            // fall through
+        }
+
+        // checking for zarr v2
+        try {
+            Map attr = ZarrGroup.open(asPath(location)).getAttributes();
+            zarrVersion = new ComparableVersion("2"); // if that works it must be v2
+            try {
+                List tmp = (List) attr.get("multiscales");
+                ngffVersion = new ComparableVersion(
+                        ((Map) tmp.get(0)).get("version").toString());
+            } catch (Exception e) {
+                log.debug("Failed to get ngff version from zarr, set to 0.4");
+                ngffVersion = new ComparableVersion("0.4");
+                // if it's zarr v2 then we can actually safely assume it's ngff v0.4
+            }
+            return;
+        } catch (Exception e) {
+            log.debug("Not zarr v2:", e);
+            throw new IOException("Failed to determine zarr version");
+        }
+    }
+
+    /**
+     * Gets the Zarr version.
+     *
+     * @return the Zarr version
+     */
+    public ComparableVersion getZarrVersion() {
+        return zarrVersion;
+    }
+
+    /**
+     * Gets the NGFF version.
+     *
+     * @return the NGFF version
+     */
+    public ComparableVersion getNgffVersion() {
+        return ngffVersion;
+    }
+
+    /**
+     * Gets the location.
+     *
+     * @return the location
+     */
+    public String getLocation() {
+        return location;
+    }
+
+    /**
+     * Gets the Zarr path.
+     *
+     * @return the Zarr path
+     * @throws IOException If the zarr can't be read
+     */
+    public ZarrPath getZarrPath() throws IOException {
+        if (zarrVersion.equals(ZARR_V2)) {
+            return new ZarrPathv2(asPath(location));
+        } else {
+            return new ZarrPathv3(asStoreHandle());
+        }
+    }
+
+    /**
+     * Converts an NGFF root string to a path, initializing a {@link FileSystem} if required.
+     * Used for Zarr version 2 (NGFF 0.4).
+     *
+     * @return Fully initialized path or null if the NGFF root directory has not been
+     *         specified in configuration.
+     * @throws IOException If the path can't be read
+     */
+    private Path asPath(String location) throws IOException {
+        try {
+            URI uri = new URI(location);
+            String scheme = uri.getScheme() != null ? uri.getScheme().toLowerCase() : "file";
+            if (scheme.startsWith("http")) {
+                String s3loc = location.replaceFirst("https?", "s3");
+                return asPath(s3loc + "?anonymous=true");
+            }
+            if (scheme.equals("s3")) {
+                if (uri.getUserInfo() != null && !uri.getUserInfo().isEmpty()) {
+                    throw new RuntimeException("Found unsupported user information in S3 URI."
+                            + " If you are trying to pass S3 credentials, "
+                            + "use either named profiles or instance credentials.");
+                }
+                String query = Optional.ofNullable(uri.getQuery()).orElse("");
+                Map params = Splitter.on('&').trimResults().omitEmptyStrings()
+                        .withKeyValueSeparator('=').split(query);
+                // drop initial "/"
+                String uriPath = uri.getPath().substring(1);
+                int first = uriPath.indexOf("/");
+                String bucket = "/" + uriPath.substring(0, first);
+                String rest = uriPath.substring(first + 1);
+                // FIXME: We might want to support additional S3FS settings in
+                // the future. See:
+                // * https://github.com/lasersonlab/Amazon-S3-FileSystem-NIO2
+                Map env = new HashMap();
+                String profile = params.get("profile");
+                if (profile != null) {
+                    env.put("s3fs_credential_profile_name", profile);
+                }
+                String anonymous = Optional.ofNullable(params.get("anonymous")).orElse("false");
+                env.put("s3fs_anonymous", anonymous);
+                OmeroS3FilesystemProvider fsp = new OmeroS3FilesystemProvider();
+                FileSystem fs = fsp.newFileSystem(uri, env);
+                return fs.getPath(bucket, rest);
+            }
+        } catch (URISyntaxException e) {
+            // we made sure earlier that location is a valid URI
+        }
+        return Path.of(location);
+    }
+
+    /**
+     * Returns a store handle. Used for Zarr version 3 (NGFF 0.5 and later).
+     *
+     * @return the store handle
+     */
+    private StoreHandle asStoreHandle() {
+        try {
+            URI uri = new URI(location);
+            String scheme = uri.getScheme() != null ?
uri.getScheme().toLowerCase() : "file"; + String locationNoParams = location.replaceAll("/?\\?.+", ""); + String store = locationNoParams.substring(0, location.lastIndexOf("/")); + String zarr = locationNoParams.substring(location.lastIndexOf("/") + 1); + + if (scheme.startsWith("http")) { + return new HttpStore(store).resolve(zarr); + } else if (scheme.startsWith("s3")) { + String[] tmp = store.replaceFirst("s3://", "").split("/"); + // String host = tmp[0]; + String bucket = tmp[1]; + String rest = String.join("/", Arrays.copyOfRange(tmp, 2, tmp.length)); + + // Use the OmeroS3FilesystemProvider to create AmazonS3 client + String query = Optional.ofNullable(uri.getQuery()).orElse(""); + Map params = Splitter.on('&').trimResults().omitEmptyStrings() + .withKeyValueSeparator('=').split(query); + Map env = new HashMap(); + String profile = params.get("profile"); + if (profile != null) { + env.put("s3fs_credential_profile_name", profile); + } + String anonymous = Optional.ofNullable(params.get("anonymous")).orElse("false"); + env.put("s3fs_anonymous", anonymous); + OmeroS3FilesystemProvider fsp = new OmeroS3FilesystemProvider(); + S3Client client = fsp.createAmazonS3(uri, env); + + // Create s3 client manually: + // AmazonS3 client = AmazonS3ClientBuilder.standard() + // .withEndpointConfiguration(new EndpointConfiguration(host, null)) + // .withCredentials(new AWSCredentialsProvider() { + // @Override + // public AWSCredentials getCredentials() { + // return new AnonymousAWSCredentials(); + // } + + // @Override + // public void refresh() { + // // do nothing + // } + // }).build(); + return new S3Store(client, bucket, rest).resolve(zarr); + } else { + return new FilesystemStore(store).resolve(zarr); + } + } catch (URISyntaxException e) { + // we checked earlier that location is a valid URI + } + return null; + } + + @Override + public String toString() { + return "ZarrInfo{" + "location='" + location + '\'' + ", storageType=" + storageType + + ", zarrVersion=" + zarrVersion + ", ngffVersion=" + ngffVersion + '}'; + } + + public StorageType getStorageType() { + return storageType; + } +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPath.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPath.java new file mode 100644 index 00000000..a27c0571 --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPath.java @@ -0,0 +1,35 @@ +package com.glencoesoftware.omero.zarr.compat; + +import java.io.IOException; +import java.util.Map; + +/** + * Interface for representing a path within a Zarr hierarchy. Provides methods for navigating the + * Zarr structure, accessing metadata, and retrieving array information. + */ +public interface ZarrPath { + + /** + * Resolves a relative path against this ZarrPath. + * + * @param path the relative path to resolve + * @return a new ZarrPath representing the resolved path + */ + public ZarrPath resolve(String path); + + /** + * Retrieves the metadata associated with this Zarr path. + * + * @return a map containing the metadata key-value pairs + * @throws IOException if an I/O error occurs while reading the metadata + */ + public Map getMetadata() throws IOException; + + /** + * Gets the Zarr array associated with this path. 
+ * + * @return the ZArray object representing the array at this path + * @throws IOException if an I/O error occurs while accessing the array + */ + public ZArray getArray() throws IOException; +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv2.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv2.java new file mode 100644 index 00000000..2079ee83 --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv2.java @@ -0,0 +1,53 @@ +package com.glencoesoftware.omero.zarr.compat; + +import com.bc.zarr.ZarrArray; +import com.bc.zarr.ZarrGroup; +import java.io.IOException; +import java.nio.file.Path; +import java.util.Map; + +class ZarrPathv2 implements ZarrPath { + + private Path path; + + public ZarrPathv2(Path path) { + this.path = path; + } + + @Override + public ZarrPath resolve(String path) { + return new ZarrPathv2(this.path.resolve(path)); + } + + @Override + public Map getMetadata() throws IOException { + return ZarrGroup.open(path).getAttributes(); + } + + @Override + public ZArray getArray() throws IOException { + return new ZArrayv2(ZarrArray.open(path)); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ZarrPathv2 that = (ZarrPathv2) obj; + return path.toString().equals(that.path.toString()); + } + + @Override + public int hashCode() { + return path.toString().hashCode(); + } + + @Override + public String toString() { + return path.toString(); + } +} diff --git a/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv3.java b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv3.java new file mode 100644 index 00000000..b3690efc --- /dev/null +++ b/src/main/java/com/glencoesoftware/omero/zarr/compat/ZarrPathv3.java @@ -0,0 +1,83 @@ +package com.glencoesoftware.omero.zarr.compat; + +import dev.zarr.zarrjava.store.StoreHandle; +import dev.zarr.zarrjava.v3.Array; +import dev.zarr.zarrjava.v3.Group; +import dev.zarr.zarrjava.v3.Node; +import java.io.IOException; +import java.util.Map; + +class ZarrPathv3 implements ZarrPath { + + private StoreHandle path; + private Group group; + private String key = null; + + public ZarrPathv3(StoreHandle path) { + try { + this.path = path; + this.group = Group.open(path); + } catch (IOException e) { + e.printStackTrace(); + } + } + + private ZarrPathv3(ZarrPathv3 parent, String resolvePath) { + // this can either resolve to another group or an array + this.path = parent.path.resolve(resolvePath); + try { + // it's a group + this.group = Group.open(this.path); + } catch (IOException e) { + // is not a group; points to array from parent group + // with resolvePath as key. 
+ this.group = parent.group; + this.key = resolvePath; + } + } + + @Override + public ZarrPath resolve(String resolvePath) { + return new ZarrPathv3(this, resolvePath); + } + + @Override + public Map getMetadata() throws IOException { + return group.metadata.attributes; + } + + @Override + public ZArray getArray() throws IOException { + Array array; + try { + Node node = group.get(key); + array = (Array) node; + } catch (Exception e) { + throw new IOException(e); + } + return new ZArrayv3(array); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ZarrPathv3 that = (ZarrPathv3) obj; + return toString().equals(that.toString()); + } + + @Override + public int hashCode() { + return toString().hashCode(); + } + + @Override + public String toString() { + return key == null ? "(Group) " + path.toString() + : "(Array) " + path.toString() + ":" + key; + } +} diff --git a/src/main/java/com/upplication/s3fs/OmeroS3FilesystemProvider.java b/src/main/java/org/carlspring/cloud/storage/s3fs/OmeroS3FilesystemProvider.java similarity index 63% rename from src/main/java/com/upplication/s3fs/OmeroS3FilesystemProvider.java rename to src/main/java/org/carlspring/cloud/storage/s3fs/OmeroS3FilesystemProvider.java index ce232a34..134d2c6f 100644 --- a/src/main/java/com/upplication/s3fs/OmeroS3FilesystemProvider.java +++ b/src/main/java/org/carlspring/cloud/storage/s3fs/OmeroS3FilesystemProvider.java @@ -16,16 +16,12 @@ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ -package com.upplication.s3fs; - -import static com.upplication.s3fs.AmazonS3Factory.ACCESS_KEY; -import static com.upplication.s3fs.AmazonS3Factory.SECRET_KEY; +package org.carlspring.cloud.storage.s3fs; import com.glencoesoftware.omero.zarr.OmeroAmazonS3ClientFactory; import com.glencoesoftware.omero.zarr.OmeroS3FileSystem; import com.glencoesoftware.omero.zarr.OmeroS3ReadOnlySeekableByteChannel; import com.google.common.base.Preconditions; -import com.upplication.s3fs.attribute.S3BasicFileAttributes; import java.io.IOException; import java.net.URI; import java.nio.channels.SeekableByteChannel; @@ -40,16 +36,20 @@ import java.util.Map; import java.util.Properties; import java.util.Set; - +import org.carlspring.cloud.storage.s3fs.attribute.S3BasicFileAttributes; +import software.amazon.awssdk.services.s3.S3Client; /** Subclass of S3FileSystemProvider with performance optimizations. */ public class OmeroS3FilesystemProvider extends S3FileSystemProvider { + private static final String ACCESS_KEY = "AWS_ACCESS_KEY_ID"; + private static final String SECRET_KEY = "AWS_SECRET_ACCESS_KEY"; + /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#newFileSystem(URI, Map)}. Our implementation - * ensures that a new filesystem is created every time and not registered - * with the JVM wide fileSystems map. + * {@link S3FileSystemProvider#newFileSystem(URI, Map)}. Our implementation ensures that a new + * filesystem is created every time and not registered with the JVM wide + * fileSystems map. */ @Override public FileSystem newFileSystem(URI uri, Map env) { @@ -63,21 +63,19 @@ public FileSystem newFileSystem(URI uri, Map env) { } /** - * An exact copy of the implementation from {@link S3FileSystemProvider} - * due to its private visibility. + * An exact copy of the implementation from {@link S3FileSystemProvider} due to its private + * visibility. 
*/ private void validateProperties(Properties props) { Preconditions.checkArgument( - (props.getProperty(ACCESS_KEY) == null && props.getProperty(SECRET_KEY) == null) - || (props.getProperty(ACCESS_KEY) != null - && props.getProperty(SECRET_KEY) != null), - "%s and %s should both be provided or should both be omitted", - ACCESS_KEY, SECRET_KEY); + (props.getProperty(ACCESS_KEY) == null && props.getProperty(SECRET_KEY) == null) + || (props.getProperty(ACCESS_KEY) != null && props.getProperty(SECRET_KEY) != null), + "%s and %s should both be provided or should both be omitted", ACCESS_KEY, SECRET_KEY); } /** - * An exact copy of the implementation from {@link S3FileSystemProvider} - * due to its private visibility. + * An exact copy of the implementation from {@link S3FileSystemProvider} due to its private + * visibility. */ private Properties getProperties(URI uri, Map env) { Properties props = loadAmazonProperties(); @@ -94,32 +92,40 @@ private Properties getProperties(URI uri, Map env) { return props; } - /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#createFileSystem(URI, Properties)}. Our - * implementation uses our own {@link OmeroS3FileSystem}. + * {@link S3FileSystemProvider#createFileSystem(URI, Properties)}. Our implementation uses our + * own {@link OmeroS3FileSystem}. */ @Override public S3FileSystem createFileSystem(URI uri, Properties props) { - return new OmeroS3FileSystem( - this, getFileSystemKey(uri, props), getAmazonS3(uri, props), uri.getHost()); + return new OmeroS3FileSystem(this, getFileSystemKey(uri, props), getS3Client(uri, props), + uri.getHost()); + } + + /** + * Create an Amazon S3 client from the given URI and environment. + */ + public S3Client createAmazonS3(URI uri, Map env) { + Properties props = getProperties(uri, env); + validateProperties(props); + return getS3Client(uri, props); } /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#getAmazonS3Factory(Properties)}. Our - * implementation uses our own {@link OmeroAmazonS3ClientFactory}. + * {@link S3FileSystemProvider#getAmazonS3Factory(Properties)}. Our implementation uses our own + * {@link OmeroAmazonS3ClientFactory}. */ @Override - protected AmazonS3Factory getAmazonS3Factory(Properties props) { + protected S3ClientFactory getS3Factory(Properties props) { return new OmeroAmazonS3ClientFactory(); } /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#checkAccess(Path, AccessMode...)}. Our - * implementation is a no-op, effectively disabling access checks. + * {@link S3FileSystemProvider#checkAccess(Path, AccessMode...)}. Our implementation is a no-op, + * effectively disabling access checks. */ @Override public void checkAccess(Path path, AccessMode... modes) throws IOException { @@ -129,8 +135,8 @@ public void checkAccess(Path path, AccessMode... modes) throws IOException { /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#exists(S3Path)}. Our - * implementation is a no-op, effectively disabling existence checks. + * {@link S3FileSystemProvider#exists(S3Path)}. Our implementation is a no-op, effectively + * disabling existence checks. */ @Override public boolean exists(S3Path path) { @@ -139,49 +145,44 @@ public boolean exists(S3Path path) { /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#newByteChannel(Path, Set, FileAttribute...)}. 
- * Our implementation uses our own - * {@link OmeroS3ReadOnlySeekableByteChannel}. + * {@link S3FileSystemProvider#newByteChannel(Path, Set, FileAttribute...)}. Our implementation + * uses our own {@link OmeroS3ReadOnlySeekableByteChannel}. */ @Override - public SeekableByteChannel newByteChannel( - Path path, Set options, FileAttribute... attrs) - throws IOException { + public SeekableByteChannel newByteChannel(Path path, Set options, + FileAttribute... attrs) throws IOException { S3Path s3Path = toS3Path(path); if (options.isEmpty() || options.contains(StandardOpenOption.READ)) { if (options.contains(StandardOpenOption.WRITE)) { - throw new UnsupportedOperationException( - "Can't read and write one on channel" - ); + throw new UnsupportedOperationException("Can't read and write one on channel"); } return new OmeroS3ReadOnlySeekableByteChannel(s3Path, options); } else { - return new S3SeekableByteChannel(s3Path, options); + return new S3SeekableByteChannel(s3Path, options, false); } } /** - * An exact copy of the implementation from {@link S3FileSystemProvider} - * due to its private visibility. + * An exact copy of the implementation from {@link S3FileSystemProvider} due to its private + * visibility. */ private S3Path toS3Path(Path path) { - Preconditions.checkArgument(path instanceof S3Path, - "path must be an instance of %s", S3Path.class.getName()); + Preconditions.checkArgument(path instanceof S3Path, "path must be an instance of %s", + S3Path.class.getName()); return (S3Path) path; } /** * Overridden, hybrid version of the implementation from - * {@link S3FileSystemProvider#readAttributes(Path, Class, LinkOption...)}. - * Our implementation is a no-op, effectively using the same set of - * read-only attributes for every file. + * {@link S3FileSystemProvider#readAttributes(Path, Class, LinkOption...)}. Our implementation + * is a no-op, effectively using the same set of read-only attributes for every file. */ @Override - public A readAttributes( - Path path, Class type, LinkOption... options) throws IOException { + public A readAttributes(Path path, Class type, + LinkOption... options) throws IOException { S3Path s3path = (S3Path) path; - BasicFileAttributes attrs = new S3BasicFileAttributes( - s3path.getKey(), null, 0, true, false); + BasicFileAttributes attrs = new S3BasicFileAttributes(s3path.getKey(), null, 0, true, + false); return type.cast(attrs); } } diff --git a/src/test/java/com/glencoesoftware/omero/zarr/TestZarrInfo.java b/src/test/java/com/glencoesoftware/omero/zarr/TestZarrInfo.java new file mode 100644 index 00000000..129192c2 --- /dev/null +++ b/src/test/java/com/glencoesoftware/omero/zarr/TestZarrInfo.java @@ -0,0 +1,159 @@ +package com.glencoesoftware.omero.zarr; + +import com.glencoesoftware.omero.zarr.compat.ZarrInfo; +import com.glencoesoftware.omero.zarr.compat.ZarrInfo.StorageType; +import java.io.IOException; +import java.nio.file.Path; +import org.apache.maven.artifact.versioning.ComparableVersion; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +/** + * Test ZarrInfo. + */ +public class TestZarrInfo { + + @Rule + public TemporaryFolder tmpDir = new TemporaryFolder(); + + /** + * Test local storage, zarr v2. + * + * @throws IOException If an I/O error occurs. 
+ */ + @Test + public void testLocalV2() throws IOException { + String path = writeTestZarr(1, 1, 1, 256, 256, "uint8", 1).toString() + "/0"; + + ZarrInfo zp = new ZarrInfo(path); + System.out.println("testLocalV2: " + zp); + Assert.assertEquals(zp.getStorageType(), StorageType.FILE); + Assert.assertEquals(new ComparableVersion("2"), zp.getZarrVersion()); + Assert.assertEquals(new ComparableVersion("0.4"), zp.getNgffVersion()); + } + + /** + * Test local storage, zarr v3. + * + * @throws IOException If an I/O error occurs. + */ + @Test + public void testLocalV3() throws IOException { + // TODO: implement + // String path = writeTestZarr(1, 1, 1, 256, 256, "uint8", 1).toString() + "/0"; + // ZarrInfo zp = new ZarrInfo(path); + // Assert.assertEquals(zp.getStorageType(), StorageType.FILE); + // Assert.assertEquals(new ComparableVersion("3"), zp.getZarrVersion()); + // Assert.assertEquals(new ComparableVersion("0.5"), zp.getNgffVersion()); + } + + /** + * Test HTTP storage, zarr v2. + * + * @throws IOException If an I/O error occurs. + */ + @Test + public void testHTTPV2() throws IOException { + ZarrInfo zp = new ZarrInfo("https://s3.ltd.ovh/public/cat_z2.ome.zarr/0"); + System.out.println("testHTTPV2: " + zp); + Assert.assertEquals(zp.getStorageType(), StorageType.HTTP); + Assert.assertEquals(new ComparableVersion("2"), zp.getZarrVersion()); + Assert.assertEquals(new ComparableVersion("0.4"), zp.getNgffVersion()); + } + + /** + * Test HTTP storage, zarr v3. + * + * @throws IOException If an I/O error occurs. + */ + @Test + public void testHTTPV3() throws IOException { + ZarrInfo zp = new ZarrInfo("https://s3.ltd.ovh/public/cat_z3.ome.zarr/0"); + System.out.println("testHTTPV3: " + zp); + Assert.assertEquals(zp.getStorageType(), StorageType.HTTP); + Assert.assertEquals(new ComparableVersion("3"), zp.getZarrVersion()); + Assert.assertEquals(new ComparableVersion("0.5"), zp.getNgffVersion()); + } + + /** + * Test public S3 storage, zarr v2. + * + * @throws IOException If an I/O error occurs. + */ + @Test + public void testS3V2Public() throws IOException { + ZarrInfo zp = new ZarrInfo("s3://s3.ltd.ovh/public/cat_z2.ome.zarr/0?anonymous=true"); + System.out.println("testS3V2Public: " + zp); + Assert.assertEquals(zp.getStorageType(), StorageType.S3); + Assert.assertEquals(new ComparableVersion("2"), zp.getZarrVersion()); + Assert.assertEquals(new ComparableVersion("0.4"), zp.getNgffVersion()); + } + + /** + * Test public S3 storage, zarr v3. + * + * @throws IOException If an I/O error occurs. + */ + @Test + public void testS3V3Public() throws IOException { + ZarrInfo zp = new ZarrInfo("s3://s3.ltd.ovh/public/cat_z3.ome.zarr/0?anonymous=true"); + System.out.println("testS3V3Public: " + zp); + Assert.assertEquals(zp.getStorageType(), StorageType.S3); + Assert.assertEquals(new ComparableVersion("3"), zp.getZarrVersion()); + Assert.assertEquals(new ComparableVersion("0.5"), zp.getNgffVersion()); + } + + // /** + // * Test private S3 storage, zarr v2. + // * + // * @throws IOException If an I/O error occurs. + // */ + // @Test + // public void testS3V2Private() throws IOException { + // ZarrInfo zp = new ZarrInfo("s3://s3.ltd.ovh/private/cat_z2.ome.zarr/0?profile=ltd"); + // System.out.println("testS3V2Private: " + zp); + // Assert.assertEquals(zp.getStorageType(), StorageType.S3); + // Assert.assertEquals(new ComparableVersion("2"), zp.getZarrVersion()); + // Assert.assertEquals(new ComparableVersion("0.4"), zp.getNgffVersion()); + // } + + // /** + // * Test private S3 storage, zarr v3. 
+ // * + // * @throws IOException If an I/O error occurs. + // */ + // @Test + // public void testS3V3Private() throws IOException { + // ZarrInfo zp = new ZarrInfo("s3://s3.ltd.ovh/private/cat_z3.ome.zarr/0?profile=ltd"); + // System.out.println("testS3V3Private: " + zp); + // Assert.assertEquals(zp.getStorageType(), StorageType.S3); + // Assert.assertEquals(new ComparableVersion("3"), zp.getZarrVersion()); + // Assert.assertEquals(new ComparableVersion("0.5"), zp.getNgffVersion()); + // } + + /** + * Write a test Zarr file. + * + * @param sizeT Number of time points. + * @param sizeC Number of channels. + * @param sizeZ Number of z-sections. + * @param sizeY Number of rows. + * @param sizeX Number of columns. + * @param pixelType Pixel type. + * @param resolutions Number of resolutions. + * @return Path to the test Zarr file. + * @throws IOException If an I/O error occurs. + */ + public Path writeTestZarr(int sizeT, int sizeC, int sizeZ, int sizeY, int sizeX, + String pixelType, int resolutions) throws IOException { + Path input = ZarrPixelBufferTest.fake("sizeT", Integer.toString(sizeT), "sizeC", + Integer.toString(sizeC), "sizeZ", Integer.toString(sizeZ), "sizeY", + Integer.toString(sizeY), "sizeX", Integer.toString(sizeX), "pixelType", pixelType, + "resolutions", Integer.toString(resolutions)); + Path output = tmpDir.getRoot().toPath().resolve("output.zarr"); + ZarrPixelBufferTest.assertBioFormats2Raw(input, output); + return output; + } +} diff --git a/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelBufferTest.java b/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelBufferTest.java index 97e408ec..be7a68df 100644 --- a/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelBufferTest.java +++ b/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelBufferTest.java @@ -19,11 +19,11 @@ package com.glencoesoftware.omero.zarr; import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.benmanes.caffeine.cache.Caffeine; import com.glencoesoftware.bioformats2raw.Converter; import com.glencoesoftware.omero.zarr.ZarrPixelBuffer.Axis; +import com.glencoesoftware.omero.zarr.compat.ZarrInfo; import java.awt.Dimension; import java.io.File; import java.io.IOException; @@ -44,7 +44,6 @@ import ome.io.nio.DimensionsOutOfBoundsException; import ome.model.core.Pixels; import ome.model.enums.DimensionOrder; -import ome.model.enums.PixelsType; import ome.util.PixelData; import org.junit.Assert; import org.junit.Rule; @@ -62,18 +61,12 @@ public class ZarrPixelBufferTest { public TemporaryFolder tmpDir = new TemporaryFolder(); /** Constructor. 
*/ - public ZarrPixelBuffer createPixelBuffer( - Pixels pixels, Path path, - Integer maxPlaneWidth, Integer maxPlaneHeight) throws IOException { - return new ZarrPixelBuffer( - pixels, path, maxPlaneWidth, maxPlaneHeight, - Caffeine.newBuilder() - .maximumSize(0) - .buildAsync(ZarrPixelsService::getZarrMetadata), - Caffeine.newBuilder() - .maximumSize(0) - .buildAsync(ZarrPixelsService::getZarrArray) - ); + public ZarrPixelBuffer createPixelBuffer(Pixels pixels, Path path, Integer maxPlaneWidth, + Integer maxPlaneHeight) throws IOException { + ZarrInfo zarrInfo = new ZarrInfo(path.toString()); + return new ZarrPixelBuffer(pixels, zarrInfo.getZarrPath(), maxPlaneWidth, maxPlaneHeight, + Caffeine.newBuilder().maximumSize(0).buildAsync(ZarrPixelsService::getZarrMetadata), + Caffeine.newBuilder().maximumSize(0).buildAsync(ZarrPixelsService::getZarrArray)); } /** @@ -81,10 +74,10 @@ public ZarrPixelBuffer createPixelBuffer( * * @param additionalArgs CLI arguments as needed beyond "input output" */ - void assertBioFormats2Raw(Path input, Path output, String... additionalArgs) - throws IOException { + static void assertBioFormats2Raw(Path input, Path output, String... additionalArgs) + throws IOException { List args = new ArrayList( - Arrays.asList(new String[] { "--compression", "null" })); + Arrays.asList(new String[] { "--compression", "null" })); for (String arg : additionalArgs) { args.add(arg); } @@ -93,10 +86,9 @@ void assertBioFormats2Raw(Path input, Path output, String... additionalArgs) try { Converter converter = new Converter(); CommandLine cli = new CommandLine(converter); - cli.execute(args.toArray(new String[]{})); + cli.execute(args.toArray(new String[] {})); Assert.assertTrue(Files.exists(output.resolve(".zattrs"))); - Assert.assertTrue(Files.exists( - output.resolve("OME").resolve("METADATA.ome.xml"))); + Assert.assertTrue(Files.exists(output.resolve("OME").resolve("METADATA.ome.xml"))); } catch (RuntimeException rt) { throw rt; } catch (Throwable t) { @@ -120,21 +112,21 @@ static Path fake(Map options) { /** * Create a Bio-Formats fake INI file to use for testing. 
* - * @param options map of the options to assign as part of the fake filename - * from the allowed keys - * @param series map of the integer series index and options map (same format - * as options to add to the fake INI content - * @see fake file specification + * @param options map of the options to assign as part of the fake filename from the allowed + * keys + * @param series map of the integer series index and options map (same format as + * options to add to the fake INI content + * @see fake + * file specification * @return path to the fake INI file that has been created */ - static Path fake(Map options, - Map> series) { + static Path fake(Map options, Map> series) { return fake(options, series, null); } - static Path fake(Map options, - Map> series, - Map originalMetadata) { + static Path fake(Map options, Map> series, + Map originalMetadata) { StringBuilder sb = new StringBuilder(); sb.append("image"); if (options != null) { @@ -151,8 +143,7 @@ static Path fake(Map options, if (originalMetadata != null) { lines.add("[GlobalMetadata]"); for (String key : originalMetadata.keySet()) { - lines.add(String.format( - "%s=%s", key, originalMetadata.get(key))); + lines.add(String.format("%s=%s", key, originalMetadata.get(key))); } } if (series != null) { @@ -160,8 +151,7 @@ static Path fake(Map options, Map seriesOptions = series.get(s); lines.add(String.format("[series_%d]", s)); for (String key : seriesOptions.keySet()) { - lines.add(String.format( - "%s=%s", key, seriesOptions.get(key))); + lines.add(String.format("%s=%s", key, seriesOptions.get(key))); } } } @@ -170,7 +160,7 @@ static Path fake(Map options, String iniPath = iniAsFile.getAbsolutePath(); String fakePath = iniPath.substring(0, iniPath.length() - 4); Path fake = Paths.get(fakePath); - Files.write(fake, new byte[]{}); + Files.write(fake, new byte[] {}); Files.write(ini, lines); iniAsFile.deleteOnExit(); File fakeAsFile = fake.toFile(); @@ -182,22 +172,12 @@ static Path fake(Map options, } /** Write Zarr multiscales attributes. */ - public Path writeTestZarr( - int sizeT, - int sizeC, - int sizeZ, - int sizeY, - int sizeX, - String pixelType, - String... options) throws IOException { - - Path input = fake( - "sizeT", Integer.toString(sizeT), - "sizeC", Integer.toString(sizeC), - "sizeZ", Integer.toString(sizeZ), - "sizeY", Integer.toString(sizeY), - "sizeX", Integer.toString(sizeX), - "pixelType", pixelType); + public Path writeTestZarr(int sizeT, int sizeC, int sizeZ, int sizeY, int sizeX, + String pixelType, String... 
options) throws IOException { + + Path input = fake("sizeT", Integer.toString(sizeT), "sizeC", Integer.toString(sizeC), + "sizeZ", Integer.toString(sizeZ), "sizeY", Integer.toString(sizeY), "sizeX", + Integer.toString(sizeX), "pixelType", pixelType); Path output = tmpDir.getRoot().toPath().resolve("output.zarr"); assertBioFormats2Raw(input, output, options); return output; @@ -212,17 +192,12 @@ public void testGetChunks() throws IOException { int sizeX = 2048; int resolutions = 3; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", - "--resolutions", String.valueOf(resolutions)); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--resolutions", + String.valueOf(resolutions)); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { int[][] chunks = zpbuf.getChunks(); - int[][] expectedChunks = new int[][] { - new int[] {1, 1, 1, 512, 1024}, - new int[] {1, 1, 1, 256, 1024}, - new int[] {1, 1, 1, 128, 512} - }; + int[][] expectedChunks = new int[][] { new int[] { 1, 1, 1, 512, 1024 }, + new int[] { 1, 1, 1, 256, 1024 }, new int[] { 1, 1, 1, 128, 512 } }; Assert.assertEquals(chunks, expectedChunks); } } @@ -237,19 +212,13 @@ public void testGet3DChunks() throws IOException { int resolutions = 4; int chunkDepth = 16; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", - "--resolutions", String.valueOf(resolutions), - "--chunk-depth", String.valueOf(chunkDepth)); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--resolutions", + String.valueOf(resolutions), "--chunk-depth", String.valueOf(chunkDepth)); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { int[][] chunks = zpbuf.getChunks(); - int[][] expectedChunks = new int[][] { - new int[] {1, 1, chunkDepth, 512, 1024}, - new int[] {1, 1, chunkDepth, 256, 1024}, - new int[] {1, 1, chunkDepth, 128, 512}, - new int[] {1, 1, chunkDepth, 64, 256} - }; + int[][] expectedChunks = new int[][] { new int[] { 1, 1, chunkDepth, 512, 1024 }, + new int[] { 1, 1, chunkDepth, 256, 1024 }, new int[] { 1, 1, chunkDepth, 128, 512 }, + new int[] { 1, 1, chunkDepth, 64, 256 } }; Assert.assertEquals(chunks, expectedChunks); } } @@ -263,25 +232,22 @@ public void testGetDatasets() throws IOException { int sizeX = 2048; int resolutions = 3; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", - "--resolutions", String.valueOf(resolutions)); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--resolutions", + String.valueOf(resolutions)); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { List> datasets = zpbuf.getDatasets(); Assert.assertEquals(datasets.size(), resolutions); for (int i = 0; i < datasets.size(); i++) { Assert.assertEquals(datasets.get(i).get("path"), Integer.toString(i)); - List> transformations = - new ArrayList>(); + List> transformations = new 
ArrayList>(); Map scale = new HashMap(); scale.put("type", "scale"); - scale.put("scale", Arrays.asList( - new Double[] {1.0, 1.0, 1.0, Math.pow(2, i), Math.pow(2, i)})); + scale.put("scale", + Arrays.asList(new Double[] { 1.0, 1.0, 1.0, Math.pow(2, i), Math.pow(2, i) })); transformations.add(scale); - Assert.assertEquals( - datasets.get(i).get("coordinateTransformations"), transformations); + Assert.assertEquals(datasets.get(i).get("coordinateTransformations"), + transformations); } } } @@ -295,15 +261,13 @@ public void testGetResolutionDescriptions() throws IOException { int sizeX = 2048; int resolutions = 3; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", - "--resolutions", String.valueOf(resolutions)); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--resolutions", + String.valueOf(resolutions)); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { List> expected = new ArrayList>(); - expected.add(Arrays.asList(new Integer[] {2048, 512})); - expected.add(Arrays.asList(new Integer[] {1024, 256})); - expected.add(Arrays.asList(new Integer[] {512, 128})); + expected.add(Arrays.asList(new Integer[] { 2048, 512 })); + expected.add(Arrays.asList(new Integer[] { 1024, 256 })); + expected.add(Arrays.asList(new Integer[] { 512, 128 })); Assert.assertEquals(resolutions, zpbuf.getResolutionLevels()); Assert.assertEquals(expected, zpbuf.getResolutionDescriptions()); @@ -343,9 +307,9 @@ public void testGetTile() throws IOException, InvalidRangeException { for (int i = 0; i < length; i++) { data[i] = i; } - test.write(data, new int[] {sizeT, sizeC, sizeZ, sizeY, sizeX}, new int[] {0, 0, 0, 0, 0}); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + test.write(data, new int[] { sizeT, sizeC, sizeZ, sizeY, sizeX }, + new int[] { 0, 0, 0, 0, 0 }); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { PixelData pixelData = zpbuf.getTile(0, 0, 0, 0, 0, 2, 2); ByteBuffer bb = pixelData.getData(); bb.order(ByteOrder.BIG_ENDIAN); @@ -373,15 +337,12 @@ private Array asArray(byte[] storage, int[] shape) { return Array.factory(DataType.INT, shape, asInt); } - private byte[] getStack( - byte[] timepoint, int c, int sizeC, int sizeZ, int sizeX, - int sizeY) { + private byte[] getStack(byte[] timepoint, int c, int sizeC, int sizeZ, int sizeX, int sizeY) { return getStack(timepoint, c, sizeC, sizeZ, sizeX, sizeY, "TCZYX"); } - private byte[] getStack( - byte[] timepoint, int c, int sizeC, int sizeZ, int sizeX, - int sizeY, String order) { + private byte[] getStack(byte[] timepoint, int c, int sizeC, int sizeZ, int sizeX, int sizeY, + String order) { // XXX: Is not data type agnostic, expects signed 32-bit integer pixels int[] shape = new int[4]; String shapeorder = order.replace("T", ""); @@ -390,22 +351,19 @@ private byte[] getStack( shape[shapeorder.indexOf('Y')] = sizeY; shape[shapeorder.indexOf('X')] = sizeX; int bytesPerPixel = 4; - int size = IntStream.of(new int[] {sizeZ, sizeY, sizeX, bytesPerPixel}) - .reduce(1, Math::multiplyExact); + int size = IntStream.of(new int[] { sizeZ, sizeY, sizeX, bytesPerPixel }).reduce(1, + Math::multiplyExact); Array array = asArray(timepoint, shape).slice(shapeorder.indexOf('C'), c); byte[] asBytes = new byte[size]; - 
ByteBuffer.wrap(asBytes).asIntBuffer() - .put((int[]) array.copyTo1DJavaArray()); + ByteBuffer.wrap(asBytes).asIntBuffer().put((int[]) array.copyTo1DJavaArray()); return asBytes; } - private byte[] getPlane( - byte[] stack, int z, int sizeZ, int sizeX, int sizeY) { + private byte[] getPlane(byte[] stack, int z, int sizeZ, int sizeX, int sizeY) { return getPlane(stack, z, sizeZ, sizeX, sizeY, "TCZYX"); } - private byte[] getPlane( - byte[] stack, int z, int sizeZ, int sizeX, int sizeY, String order) { + private byte[] getPlane(byte[] stack, int z, int sizeZ, int sizeX, int sizeY, String order) { // XXX: Is not data type agnostic, expects signed 32-bit integer pixels String shapeorder = order.replace("T", "").replace("C", ""); int[] shape = new int[3]; @@ -413,33 +371,27 @@ private byte[] getPlane( shape[shapeorder.indexOf('Y')] = sizeY; shape[shapeorder.indexOf('X')] = sizeX; int bytesPerPixel = 4; - int size = IntStream.of(new int[] {sizeY, sizeX, bytesPerPixel}) - .reduce(1, Math::multiplyExact); + int size = IntStream.of(new int[] { sizeY, sizeX, bytesPerPixel }).reduce(1, + Math::multiplyExact); Array array = asArray(stack, shape).slice(shapeorder.indexOf('Z'), z); byte[] asBytes = new byte[size]; - ByteBuffer.wrap(asBytes).asIntBuffer() - .put((int[]) array.copyTo1DJavaArray()); + ByteBuffer.wrap(asBytes).asIntBuffer().put((int[]) array.copyTo1DJavaArray()); return asBytes; } - private void assertPixels( - ZarrPixelBuffer zpbuf, int sizeX, int sizeY, int sizeZ, int sizeC, int sizeT) - throws IOException { + private void assertPixels(ZarrPixelBuffer zpbuf, int sizeX, int sizeY, int sizeZ, int sizeC, + int sizeT) throws IOException { for (int t = 0; t < sizeT; t++) { for (int c = 0; c < sizeC; c++) { for (int z = 0; z < sizeZ; z++) { byte[] plane = zpbuf.getPlane(z, c, t).getData().array(); - int[] seriesPlaneNumberZCT = FakeReader.readSpecialPixels( - plane, zpbuf.getPixelsType(), false); - int planeNumber = FormatTools.getIndex( - DimensionOrder.VALUE_XYZCT, - sizeZ, sizeC, sizeT, sizeZ * sizeC * sizeT, - z, c, t); - Assert.assertArrayEquals( - Arrays.toString(seriesPlaneNumberZCT), - new int[] {0, planeNumber, z, c, t}, - seriesPlaneNumberZCT); + int[] seriesPlaneNumberZCT = FakeReader.readSpecialPixels(plane, + zpbuf.getPixelsType(), false); + int planeNumber = FormatTools.getIndex(DimensionOrder.VALUE_XYZCT, sizeZ, sizeC, + sizeT, sizeZ * sizeC * sizeT, z, c, t); + Assert.assertArrayEquals(Arrays.toString(seriesPlaneNumberZCT), + new int[] { 0, planeNumber, z, c, t }, seriesPlaneNumberZCT); } } } @@ -471,19 +423,16 @@ private void assertAxes(ZarrPixelBuffer zpbuf, String order) { private byte[] getCol(byte[] plane, int x, int sizeX, int sizeY) { // XXX: Is not data type agnostic, expects signed 32-bit integer pixels int bytesPerPixel = 4; - int[] shape = new int[] {sizeY, sizeX}; - int size = IntStream.of(new int[] {sizeY, bytesPerPixel}) - .reduce(1, Math::multiplyExact); + int[] shape = new int[] { sizeY, sizeX }; + int size = IntStream.of(new int[] { sizeY, bytesPerPixel }).reduce(1, Math::multiplyExact); Array array = asArray(plane, shape).slice(1, x); byte[] asBytes = new byte[size]; - ByteBuffer.wrap(asBytes).asIntBuffer() - .put((int[]) array.copyTo1DJavaArray()); + ByteBuffer.wrap(asBytes).asIntBuffer().put((int[]) array.copyTo1DJavaArray()); return asBytes; } @Test - public void testGetTimepointStackPlaneRowCol() - throws IOException, InvalidRangeException { + public void testGetTimepointStackPlaneRowCol() throws IOException, InvalidRangeException { int sizeT = 2; int sizeC 
= 3; int sizeZ = 4; @@ -491,42 +440,33 @@ public void testGetTimepointStackPlaneRowCol() int sizeX = 2048; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "int32"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 2048, 2048)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 2048, 2048)) { for (int t = 0; t < sizeT; t++) { // Assert timepoint byte[] timepoint = zpbuf.getTimepoint(t).getData().array(); for (int c = 0; c < sizeC; c++) { // Assert stack byte[] stack = zpbuf.getStack(c, t).getData().array(); - byte[] stackFromTimepoint = - getStack(timepoint, c, sizeC, sizeZ, sizeX, sizeY); + byte[] stackFromTimepoint = getStack(timepoint, c, sizeC, sizeZ, sizeX, sizeY); Assert.assertArrayEquals(stack, stackFromTimepoint); for (int z = 0; z < sizeZ; z++) { // Assert plane - byte[] plane = - zpbuf.getPlane(z, c, t).getData().array(); - byte[] planeFromStack = - getPlane(stack, z, sizeZ, sizeX, sizeY); + byte[] plane = zpbuf.getPlane(z, c, t).getData().array(); + byte[] planeFromStack = getPlane(stack, z, sizeZ, sizeX, sizeY); Assert.assertArrayEquals(plane, planeFromStack); - int[] seriesPlaneNumberZCT = - FakeReader.readSpecialPixels( - plane, zpbuf.getPixelsType(), false); - int planeNumber = FormatTools.getIndex( - DimensionOrder.VALUE_XYZCT, - sizeZ, sizeC, sizeT, sizeZ * sizeC * sizeT, - z, c, t); - Assert.assertArrayEquals( - Arrays.toString(seriesPlaneNumberZCT), - new int[] {0, planeNumber, z, c, t}, - seriesPlaneNumberZCT); + int[] seriesPlaneNumberZCT = FakeReader.readSpecialPixels(plane, + zpbuf.getPixelsType(), false); + int planeNumber = FormatTools.getIndex(DimensionOrder.VALUE_XYZCT, sizeZ, + sizeC, sizeT, sizeZ * sizeC * sizeT, z, c, t); + Assert.assertArrayEquals(Arrays.toString(seriesPlaneNumberZCT), + new int[] { 0, planeNumber, z, c, t }, seriesPlaneNumberZCT); // Assert row int y = sizeY / 2; int rowSize = zpbuf.getRowSize(); int rowOffset = y * rowSize; byte[] row = zpbuf.getRow(y, z, c, t).getData().array(); - byte[] rowExpected = Arrays.copyOfRange( - plane, rowOffset, rowOffset + rowSize); + byte[] rowExpected = Arrays.copyOfRange(plane, rowOffset, + rowOffset + rowSize); Assert.assertArrayEquals(rowExpected, row); // Assert column int x = sizeX / 2; @@ -540,8 +480,7 @@ public void testGetTimepointStackPlaneRowCol() } @Test(expected = DimensionsOutOfBoundsException.class) - public void testGetTileLargerThanImage() - throws IOException, InvalidRangeException { + public void testGetTileLargerThanImage() throws IOException, InvalidRangeException { int sizeT = 2; int sizeC = 3; int sizeZ = 4; @@ -555,9 +494,9 @@ public void testGetTileLargerThanImage() for (int i = 0; i < length; i++) { data[i] = i; } - test.write(data, new int[] {sizeT, sizeC, sizeZ, sizeY, sizeX}, new int[] {0, 0, 0, 0, 0}); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + test.write(data, new int[] { sizeT, sizeC, sizeZ, sizeY, sizeX }, + new int[] { 0, 0, 0, 0, 0 }); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.setResolutionLevel(0); PixelData pixelData = zpbuf.getTile(0, 0, 0, 0, 0, 10, 10); ByteBuffer bb = pixelData.getData(); @@ -571,8 +510,7 @@ public void testGetTileLargerThanImage() } @Test(expected = IllegalArgumentException.class) - public void testTileIntegerOverflow() - throws IOException, InvalidRangeException { + public void 
testTileIntegerOverflow() throws IOException, InvalidRangeException { int sizeT = 1; int sizeC = 3; int sizeZ = 1; @@ -584,15 +522,13 @@ public void testTileIntegerOverflow() // Hack the .zarray so we can appear as though we have more data than // we actually have written above. ObjectMapper mapper = new ObjectMapper(); - HashMap arrayAttrs = mapper.readValue( - Files.readAllBytes(output.resolve("0/0/.zarray")), - HashMap.class); + HashMap arrayAttrs = mapper + .readValue(Files.readAllBytes(output.resolve("0/0/.zarray")), HashMap.class); List shape = (List) arrayAttrs.get("shape"); shape.set(3, 50000); shape.set(4, 50000); mapper.writeValue(output.resolve("0/0/.zarray").toFile(), arrayAttrs); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 32, 32)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 32, 32)) { zpbuf.getTile(0, 0, 0, 0, 0, 50000, 50000); } } @@ -607,8 +543,7 @@ public void testTileExceedsMinMax() throws IOException { Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 32, 32)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 32, 32)) { Assert.assertNull(zpbuf.getTile(0, 0, 0, 0, 0, 32, 33)); // Throws exception zpbuf.getTile(0, 0, 0, -1, 0, 1, 1); @@ -624,8 +559,7 @@ public void testCheckBoundsValidZeros() throws IOException { int sizeX = 2048; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.checkBounds(0, 0, 0, 0, 0); } } @@ -639,8 +573,7 @@ public void testCheckBoundsValidEnd() throws IOException { int sizeX = 2048; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.checkBounds(2047, 511, 2, 1, 0); } } @@ -654,8 +587,7 @@ public void testCheckBoundsOutOfRange() throws IOException { int sizeX = 2048; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.checkBounds(2048, 511, 2, 1, 0); } } @@ -667,11 +599,9 @@ public void testCheckBounds() throws IOException { int sizeZ = 3; int sizeY = 512; int sizeX = 2048; - Pixels pixels = new Pixels( - null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); + Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.checkBounds(-1, 0, 0, 0, 0); } } @@ -685,8 +615,7 @@ public void 
testGetTileSize() throws IOException { int sizeX = 2048; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { Dimension tileSize = zpbuf.getTileSize(); Assert.assertEquals(1024, tileSize.getWidth(), 0.1); Assert.assertEquals(1024, tileSize.getHeight(), 0.1); @@ -704,8 +633,7 @@ public void testUint16() throws IOException { int bytesPerPixel = 2; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { Assert.assertEquals(FormatTools.UINT16, zpbuf.getPixelsType()); Assert.assertEquals(false, zpbuf.isSigned()); Assert.assertEquals(false, zpbuf.isFloat()); @@ -724,8 +652,7 @@ public void testFloat() throws IOException { int bytesPerPixel = 4; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "float"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { Assert.assertEquals(FormatTools.FLOAT, zpbuf.getPixelsType()); Assert.assertEquals(true, zpbuf.isSigned()); Assert.assertEquals(true, zpbuf.isFloat()); @@ -743,32 +670,22 @@ public void testSizes() throws IOException { int bytesPerPixel = 2; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { // Plane size - Assert.assertEquals( - sizeX * sizeY * bytesPerPixel, - zpbuf.getPlaneSize().longValue()); + Assert.assertEquals(sizeX * sizeY * bytesPerPixel, zpbuf.getPlaneSize().longValue()); // Stack size - Assert.assertEquals( - sizeZ * sizeX * sizeY * bytesPerPixel, - zpbuf.getStackSize().longValue()); + Assert.assertEquals(sizeZ * sizeX * sizeY * bytesPerPixel, + zpbuf.getStackSize().longValue()); // Timepoint size - Assert.assertEquals( - sizeC * sizeZ * sizeX * sizeY * bytesPerPixel, - zpbuf.getTimepointSize().longValue()); + Assert.assertEquals(sizeC * sizeZ * sizeX * sizeY * bytesPerPixel, + zpbuf.getTimepointSize().longValue()); // Total size - Assert.assertEquals( - sizeT * sizeC * sizeZ * sizeX * sizeY * bytesPerPixel, - zpbuf.getTotalSize().longValue()); + Assert.assertEquals(sizeT * sizeC * sizeZ * sizeX * sizeY * bytesPerPixel, + zpbuf.getTotalSize().longValue()); // Column size - Assert.assertEquals( - sizeY * bytesPerPixel, - zpbuf.getColSize().longValue()); + Assert.assertEquals(sizeY * bytesPerPixel, zpbuf.getColSize().longValue()); // Row size - Assert.assertEquals( - sizeX * bytesPerPixel, - zpbuf.getRowSize().longValue()); + Assert.assertEquals(sizeX * bytesPerPixel, zpbuf.getRowSize().longValue()); } } @@ -781,11 +698,9 @@ public void testSetResolutionLevelOutOfBounds() throws IOException { int sizeX = 2048; int resolutions = 3; Pixels 
pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", - "--resolutions", String.valueOf(resolutions)); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--resolutions", + String.valueOf(resolutions)); + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), 1024, 1024)) { zpbuf.setResolutionLevel(resolutions); } } @@ -799,23 +714,20 @@ public void testDownsampledZ() throws IOException { int sizeX = 2048; int resolutions = 3; Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint8", - "--resolutions", String.valueOf(resolutions)); + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint8", "--resolutions", + String.valueOf(resolutions)); // Hack the .zarray to hide Z sections in lower resolutions for (int r = 1; r < resolutions; r++) { ObjectMapper mapper = new ObjectMapper(); HashMap arrayAttrs = mapper.readValue( - Files.readAllBytes(output.resolve("0/" + r + "/.zarray")), - HashMap.class); + Files.readAllBytes(output.resolve("0/" + r + "/.zarray")), HashMap.class); List shape = (List) arrayAttrs.get("shape"); shape.set(2, sizeZ / (int) Math.pow(2, r)); mapper.writeValue(output.resolve("0/" + r + "/.zarray").toFile(), arrayAttrs); } - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { // get the last Z section, for each resolution level for (int r = 0; r < resolutions; r++) { zpbuf.setResolutionLevel(r); @@ -826,8 +738,7 @@ public void testDownsampledZ() throws IOException { } @Test - public void testReadDataNonDefaultAxes() - throws IOException, InvalidRangeException { + public void testReadDataNonDefaultAxes() throws IOException, InvalidRangeException { // Pretty much the same as testGetTimepointStackPlaneRowCol() // but testing a different axes order. 
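Note on the two tests above: testTileIntegerOverflow and testDownsampledZ both rely on editing the Zarr v2 array metadata by hand. The .zarray file is plain JSON, so the tests read it with Jackson, change the "shape" entry, and write it back before opening the buffer. A minimal stand-alone sketch of that pattern follows; the file name, axis index, and size used here are illustrative only and not part of the patch.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.HashMap;
    import java.util.List;

    class ZarrShapeTweak {
        /** Rewrite the "shape" entry of a Zarr v2 array so readers see a larger X extent. */
        @SuppressWarnings("unchecked")
        static void enlargeX(Path array, int fakeSizeX) throws IOException {
            ObjectMapper mapper = new ObjectMapper();
            HashMap<String, Object> attrs = mapper.readValue(
                    Files.readAllBytes(array.resolve(".zarray")), HashMap.class);
            List<Integer> shape = (List<Integer>) attrs.get("shape");
            // Last axis is X for the TCZYX layout written by bioformats2raw.
            shape.set(shape.size() - 1, fakeSizeX);
            mapper.writeValue(array.resolve(".zarray").toFile(), attrs);
        }
    }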
int sizeT = 2; @@ -836,37 +747,34 @@ public void testReadDataNonDefaultAxes() int sizeY = 1024; int sizeX = 2048; String order = DimensionOrder.VALUE_XYCTZ; // Default XYZCT - String revOrder = new StringBuilder(order).reverse().toString(); - Pixels pixels = new Pixels( - null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", new DimensionOrder(order)); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "int32", "--dimension-order", order); - - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { + String revOrder = new StringBuilder(order).reverse().toString(); + Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", + new DimensionOrder(order)); + Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "int32", "--dimension-order", + order); + + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { for (int t = 0; t < sizeT; t++) { // Assert timepoint byte[] timepoint = zpbuf.getTimepoint(t).getData().array(); for (int c = 0; c < sizeC; c++) { // Assert stack byte[] stack = zpbuf.getStack(c, t).getData().array(); - byte[] stackFromTimepoint = - getStack(timepoint, c, sizeC, sizeZ, sizeX, sizeY, revOrder); + byte[] stackFromTimepoint = getStack(timepoint, c, sizeC, sizeZ, sizeX, sizeY, + revOrder); Assert.assertArrayEquals(stack, stackFromTimepoint); for (int z = 0; z < sizeZ; z++) { // Assert plane - byte[] plane = - zpbuf.getPlane(z, c, t).getData().array(); - byte[] planeFromStack = - getPlane(stack, z, sizeZ, sizeX, sizeY, revOrder); + byte[] plane = zpbuf.getPlane(z, c, t).getData().array(); + byte[] planeFromStack = getPlane(stack, z, sizeZ, sizeX, sizeY, revOrder); Assert.assertArrayEquals(plane, planeFromStack); // Assert row int y = sizeY / 2; int rowSize = zpbuf.getRowSize(); int rowOffset = y * rowSize; byte[] row = zpbuf.getRow(y, z, c, t).getData().array(); - byte[] rowExpected = Arrays.copyOfRange( - plane, rowOffset, rowOffset + rowSize); + byte[] rowExpected = Arrays.copyOfRange(plane, rowOffset, + rowOffset + rowSize); Assert.assertArrayEquals(rowExpected, row); // Assert column int x = sizeX / 2; @@ -880,8 +788,7 @@ public void testReadDataNonDefaultAxes() } @Test - public void testDefaultOrder() - throws IOException, InvalidRangeException { + public void testDefaultOrder() throws IOException, InvalidRangeException { // Check that if access are not in the file it defaults to TCZYX order when no axes found int sizeT = 1; @@ -892,8 +799,7 @@ public void testDefaultOrder() Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint8"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { Assert.assertEquals(sizeT, zpbuf.getSizeT()); Assert.assertEquals(sizeC, zpbuf.getSizeC()); Assert.assertEquals(sizeZ, zpbuf.getSizeZ()); @@ -979,13 +885,12 @@ private void testOrder(String order) throws IOException, InvalidRangeException { int sizeZ = 4; int sizeY = 256; int sizeX = 512; - Pixels pixels = new Pixels( - null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", new DimensionOrder(order)); - Path output = writeTestZarr( - sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--dimension-order", order); + Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", + new DimensionOrder(order)); + Path output = 
writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", + "--dimension-order", order); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { Assert.assertEquals(sizeT, zpbuf.getSizeT()); Assert.assertEquals(sizeC, zpbuf.getSizeC()); Assert.assertEquals(sizeZ, zpbuf.getSizeZ()); @@ -996,14 +901,12 @@ private void testOrder(String order) throws IOException, InvalidRangeException { } } - private void testCompactDimensions( - int sizeX, int sizeY, int sizeZ, int sizeC, int sizeT, String order) - throws IOException, InvalidRangeException { - + private void testCompactDimensions(int sizeX, int sizeY, int sizeZ, int sizeC, int sizeT, + String order) throws IOException, InvalidRangeException { + Pixels pixels = new Pixels(null, null, sizeX, sizeY, sizeZ, sizeC, sizeT, "", null); Path output = writeTestZarr(sizeT, sizeC, sizeZ, sizeY, sizeX, "uint16", "--compact"); - try (ZarrPixelBuffer zpbuf = - createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { + try (ZarrPixelBuffer zpbuf = createPixelBuffer(pixels, output.resolve("0"), sizeX, sizeY)) { assertAxes(zpbuf, order); assertPixels(zpbuf, sizeX, sizeY, sizeZ, sizeC, sizeT); } diff --git a/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelsServiceTest.java b/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelsServiceTest.java index 6d96c506..bf62c584 100644 --- a/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelsServiceTest.java +++ b/src/test/java/com/glencoesoftware/omero/zarr/ZarrPixelsServiceTest.java @@ -18,11 +18,9 @@ package com.glencoesoftware.omero.zarr; -import static omero.rtypes.rdouble; import static omero.rtypes.rlong; import static omero.rtypes.rstring; -import com.glencoesoftware.omero.zarr.ZarrPixelsService; import java.io.File; import java.io.IOException; import java.nio.file.Files;
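The reworked createPixelBuffer helper near the top of ZarrPixelBufferTest shows the new entry point exercised throughout this patch: a path or URI is first wrapped in ZarrInfo, and the resolved root is handed to ZarrPixelBuffer together with the two Caffeine caches. A rough usage sketch, assuming the constructors and accessors exercised by the tests above are public API, might look like this (the URI and image dimensions are placeholders):

    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.glencoesoftware.omero.zarr.ZarrPixelBuffer;
    import com.glencoesoftware.omero.zarr.ZarrPixelsService;
    import com.glencoesoftware.omero.zarr.compat.ZarrInfo;
    import java.io.IOException;
    import ome.model.core.Pixels;

    public class ZarrInfoSketch {
        public static void main(String[] args) throws IOException {
            // Placeholder URI; local paths, https:// and s3:// (with ?anonymous=true
            // or ?profile=...) are the forms exercised by TestZarrInfo above.
            ZarrInfo info = new ZarrInfo("https://example.org/data/image.ome.zarr/0");
            System.out.println(info.getStorageType());  // FILE, HTTP or S3
            System.out.println(info.getZarrVersion());  // e.g. "2" or "3"
            System.out.println(info.getNgffVersion());  // e.g. "0.4" or "0.5"

            // Same construction as the test helper: ZarrInfo resolves the root,
            // Caffeine caches backing the metadata and array lookups.
            Pixels pixels = new Pixels(null, null, 2048, 512, 3, 2, 1, "", null);
            try (ZarrPixelBuffer buffer = new ZarrPixelBuffer(
                    pixels, info.getZarrPath(), 1024, 1024,
                    Caffeine.newBuilder().maximumSize(0)
                            .buildAsync(ZarrPixelsService::getZarrMetadata),
                    Caffeine.newBuilder().maximumSize(0)
                            .buildAsync(ZarrPixelsService::getZarrArray))) {
                System.out.println(buffer.getResolutionLevels());
            }
        }
    }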