From adc45ae07b157e8846921aef8d8c15ac68fa2a46 Mon Sep 17 00:00:00 2001
From: brokkoli71 <44113112+brokkoli71@users.noreply.github.com>
Date: Mon, 3 Jun 2024 23:30:25 +0200
Subject: [PATCH 01/15] Test zarrita compatibility (#3)
* add zarrita python script
* add testReadFromZarrita
* renamed zarrita_write
* add testWriteToZarrita
* parameterized codecs in testReadFromZarrita
* fixed camelcase in zarr.json
* add venv for executing zarrita in linux
* adapt test-setup for linux
* adapt test-setup for linux
* fix testReadFromZarrita for gzip
* parameterize codecs in testReadFromZarrita
* remove unused imports
* add testCodecsWriteRead
* reformat
* remove unnecessary argument
* add testCodecTranspose
* start removing constants "C" and "F" from Transpose Codec's order
* add CoreArrayMetadata to codec object instead of passing it as an argument in encode and decode (see the sketch after this list)
remove transpose order "F" and "C"
* remove wrong dependency
* set fail-fast: false
* specify testSourceDirectory
* added property index_location to ShardingIndexedCodec
* add indexLocation in ShardingCodec.
* change junit version for TestUtils
* remove creation of dir testdata
* update dependencies for JUnit 5
* install zarrita in CI
* correct python version, maven no-transfer-progress
* add venv for executing zarrita to CI
* add /venv_zarrita to .gitignore
* remove deprecated zarrita venv
* test CI
* fix tests for windows
* update python path for windows in ci.yml
* add Development Start-Guide to Run Tests Locally
* correct Development Start-Guide to python3.11
* add support for shardingCodec indexLocation=start
* code cleanup
* add testZstdLibrary
* fix Zstd compression and decompression
* cleanup code
* cleanup code
* remove unused method
* include crc32c codec into tests
* incorporate feedback for java version and merge tests
* add more variation to codec-configuration in tests with zarrita
* fix BloscCodec with shuffle = "shuffle"
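
For context, here is a minimal sketch (not part of this patch) of how the reworked codec API described above is meant to be used: codecs now receive `CoreArrayMetadata` via `setCoreArrayMetadata(...)` inside the `CodecPipeline`, so `encode`/`decode` take only the chunk data; the transpose order is an `int[]` permutation; and sharding accepts an `index_location` of `"start"` or `"end"`. The builder calls are the ones introduced or kept by this patch, while the class name, store path, shapes, and fill value are made-up example values.

```java
// Illustrative only: exercises the builder methods touched by this patch.
// Store path, shapes, and fill value are arbitrary example values.
import dev.zarr.zarrjava.ZarrException;
import dev.zarr.zarrjava.store.FilesystemStore;
import dev.zarr.zarrjava.v3.Array;
import dev.zarr.zarrjava.v3.DataType;

import java.io.IOException;
import java.nio.file.Paths;

public class CodecApiSketch {

  public static void main(String[] args) throws IOException, ZarrException {
    Array array = Array.create(
        new FilesystemStore(Paths.get("testoutput")).resolve("codec_api_sketch"),
        Array.metadataBuilder()
            .withShape(16, 16)
            .withDataType(DataType.UINT32)
            .withChunkShape(8, 8)
            .withFillValue(0)
            .withCodecs(c -> c
                // transpose order is now an int[] permutation instead of "C"/"F"
                .withTranspose(new int[]{1, 0})
                // sharding now takes an index_location ("start" or "end")
                .withSharding(new int[]{4, 4}, c1 -> c1.withBlosc(), "start")
                .withCrc32c())
            .build()
    );
    // encode/decode no longer take CoreArrayMetadata; the CodecPipeline injects it per codec.
    array.write(ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{16, 16}));
  }
}
```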
---
.github/workflows/ci.yml | 20 +-
.gitignore | 3 +-
README.md | 16 +
pom.xml | 37 +-
.../java/dev/zarr/zarrjava/utils/Utils.java | 20 +
src/main/java/dev/zarr/zarrjava/v3/Array.java | 9 +-
.../zarrjava/v3/codec/ArrayArrayCodec.java | 7 +-
.../zarrjava/v3/codec/ArrayBytesCodec.java | 17 +-
.../zarrjava/v3/codec/BytesBytesCodec.java | 10 +-
.../dev/zarr/zarrjava/v3/codec/Codec.java | 19 +-
.../zarr/zarrjava/v3/codec/CodecBuilder.java | 36 +-
.../zarr/zarrjava/v3/codec/CodecPipeline.java | 29 +-
.../zarrjava/v3/codec/core/BloscCodec.java | 10 +-
.../zarrjava/v3/codec/core/BytesCodec.java | 8 +-
.../zarrjava/v3/codec/core/Crc32cCodec.java | 10 +-
.../zarrjava/v3/codec/core/GzipCodec.java | 6 +-
.../v3/codec/core/ShardingIndexedCodec.java | 119 ++--
.../v3/codec/core/TransposeCodec.java | 113 ++--
.../zarrjava/v3/codec/core/ZstdCodec.java | 117 ++--
.../java/dev/zarr/zarrjava/TestUtils.java | 32 +
src/test/java/dev/zarr/zarrjava/ZarrTest.java | 626 ++++++++++++------
src/test/python-scripts/zarrita_read.py | 53 ++
src/test/python-scripts/zarrita_write.py | 47 ++
src/test/python-scripts/zstd_decompress.py | 13 +
testdata/sharding_index_location/end/c/0/0/0 | Bin 0 -> 4228 bytes
testdata/sharding_index_location/end/c/0/0/1 | Bin 0 -> 4228 bytes
testdata/sharding_index_location/end/c/0/1/0 | Bin 0 -> 4228 bytes
testdata/sharding_index_location/end/c/0/1/1 | Bin 0 -> 4228 bytes
.../sharding_index_location/end/zarr.json | 76 +++
.../sharding_index_location/start/c/0/0/0 | Bin 0 -> 4228 bytes
.../sharding_index_location/start/c/0/0/1 | Bin 0 -> 4228 bytes
.../sharding_index_location/start/c/0/1/0 | Bin 0 -> 4228 bytes
.../sharding_index_location/start/c/0/1/1 | Bin 0 -> 4228 bytes
.../sharding_index_location/start/zarr.json | 76 +++
34 files changed, 1100 insertions(+), 429 deletions(-)
create mode 100644 src/test/java/dev/zarr/zarrjava/TestUtils.java
create mode 100644 src/test/python-scripts/zarrita_read.py
create mode 100644 src/test/python-scripts/zarrita_write.py
create mode 100644 src/test/python-scripts/zstd_decompress.py
create mode 100644 testdata/sharding_index_location/end/c/0/0/0
create mode 100644 testdata/sharding_index_location/end/c/0/0/1
create mode 100644 testdata/sharding_index_location/end/c/0/1/0
create mode 100644 testdata/sharding_index_location/end/c/0/1/1
create mode 100644 testdata/sharding_index_location/end/zarr.json
create mode 100644 testdata/sharding_index_location/start/c/0/0/0
create mode 100644 testdata/sharding_index_location/start/c/0/0/1
create mode 100644 testdata/sharding_index_location/start/c/0/1/0
create mode 100644 testdata/sharding_index_location/start/c/0/1/1
create mode 100644 testdata/sharding_index_location/start/zarr.json
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e913c2d..be4cfe8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -11,14 +11,16 @@ jobs:
build:
strategy:
matrix:
- os: [ ubuntu, windows, macos ]
- runs-on: ${{ matrix.os }}-latest
+ os: [ ubuntu-latest, windows-latest, macos-latest ]
+ fail-fast: false
+ runs-on: ${{ matrix.os }}
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@v3
+
- name: Set up JDK
uses: actions/setup-java@v3
with:
@@ -26,6 +28,16 @@ jobs:
distribution: 'temurin'
cache: maven
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+
+ - name: Install zarrita
+ run: |
+ python -m venv venv_zarrita
+ if [ "${{ runner.os }}" = "Windows" ]; then venv_zarrita/Scripts/pip install zarrita; else venv_zarrita/bin/pip install zarrita; fi
+
- name: Download blosc jar
run: |
mkdir -p ../blosc-java/target
@@ -33,7 +45,7 @@ jobs:
- name: Download testdata
run: |
- mkdir testdata testoutput
+ mkdir testoutput
curl https://static.webknossos.org/data/zarr_v3/l4_sample.zip -o testdata/l4_sample.zip
cd testdata
unzip l4_sample.zip
@@ -44,7 +56,7 @@ jobs:
- name: Test
env:
MAVEN_OPTS: "-Xmx6g"
- run: mvn test -DargLine="-Xmx6g"
+ run: mvn --no-transfer-progress test -DargLine="-Xmx6g"
- name: Assemble JAR
run: mvn package -DskipTests
diff --git a/.gitignore b/.gitignore
index bd4b34c..4991720 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,5 +36,6 @@ build/
### Custom ###
-/testdata
+/testdata/l4_sample
/testoutput
+/venv_zarrita
diff --git a/README.md b/README.md
index ae77c5c..76809f7 100644
--- a/README.md
+++ b/README.md
@@ -38,3 +38,19 @@ array.write(
ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{1, 1024, 1024, 1024})
);
```
+## Development Start-Guide
+
+### Run Tests Locally
+To run the tests locally, make sure `python3.11` is installed.
+Also, you need to set up a venv for zarrita at the root of the project:
+`python3.11 -m venv venv_zarrita`.
+
+Then install zarrita there with `venv_zarrita/Scripts/pip install zarrita`
+for Windows and `venv_zarrita/bin/pip install zarrita` for Linux.
+
+Furthermore, you will need the `l4_sample` test data:
+
+`curl https://static.webknossos.org/data/zarr_v3/l4_sample.zip -o testdata/l4_sample.zip
+&& cd testdata
+&& unzip l4_sample.zip
+`
\ No newline at end of file
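
For reference, the following is a minimal sketch (not part of the README or this patch) of how the Java test suite is expected to call into the `venv_zarrita` interpreter set up above; it mirrors the `pythonPath()` helper and `ProcessBuilder` usage added to `ZarrTest.java` later in this patch. The class name and the example arguments (`gzip`, `5`, `testoutput`) are illustrative, taken from one of the parameterized test cases.

```java
// Illustrative only: runs a zarrita helper script through the venv created above,
// the same way the parameterized tests in ZarrTest.java shell out to Python.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Paths;

public class ZarritaVenvSketch {

  // Windows venvs place the interpreter under Scripts/, Linux and macOS under bin/
  static String pythonPath() {
    return System.getProperty("os.name").startsWith("Windows")
        ? "venv_zarrita\\Scripts\\python.exe"
        : "venv_zarrita/bin/python";
  }

  public static void main(String[] args) throws IOException, InterruptedException {
    Process process = new ProcessBuilder(
        pythonPath(),
        Paths.get("src/test/python-scripts/zarrita_write.py").toString(),
        "gzip", "5", "testoutput")  // codec, codec parameter, output directory
        .redirectErrorStream(true)
        .start();

    // forward the script's output so failures are visible, then check the exit code
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
    System.out.println("exit code: " + process.waitFor());
  }
}
```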
diff --git a/pom.xml b/pom.xml
index edd4fe7..e3f4e4b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -16,9 +16,31 @@
1.12.477
5.5.3
1.5.5-5
+    <junit-jupiter-version>5.10.2</junit-jupiter-version>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <version>${junit-jupiter-version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <version>${junit-jupiter-version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <version>${junit-jupiter-version}</version>
+      <scope>test</scope>
+    </dependency>
+
+
com.fasterxml.jackson.core
jackson-databind
@@ -54,6 +76,7 @@
okhttp
2.7.5
+
junit
junit
@@ -70,4 +93,16 @@
-</project>
\ No newline at end of file
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>3.2.5</version>
+        <configuration>
+          <skipTests>false</skipTests>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/src/main/java/dev/zarr/zarrjava/utils/Utils.java b/src/main/java/dev/zarr/zarrjava/utils/Utils.java
index 93b7cf1..3ea4bfd 100644
--- a/src/main/java/dev/zarr/zarrjava/utils/Utils.java
+++ b/src/main/java/dev/zarr/zarrjava/utils/Utils.java
@@ -77,4 +77,24 @@ public static T[] concatArrays(T[] array1, T[]... arrays) {
}
return result;
}
+
+ public static boolean isPermutation(int[] array) {
+ if (array.length==0){
+ return false;
+ }
+ int[] arange = new int[array.length];
+ Arrays.setAll(arange, i -> i);
+ int[] orderSorted = array.clone();
+ Arrays.sort(orderSorted);
+ return Arrays.equals(orderSorted, arange);
+ }
+
+ public static int[] inversePermutation(int[] origin){
+ assert isPermutation(origin);
+ int[] inverse = new int[origin.length];
+ for (int i = 0; i < origin.length; i++) {
+ inverse[origin[i]] = i;
+ }
+ return inverse;
+ }
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/Array.java b/src/main/java/dev/zarr/zarrjava/v3/Array.java
index 631ef70..85e0457 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/Array.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/Array.java
@@ -27,7 +27,7 @@ protected Array(StoreHandle storeHandle, ArrayMetadata arrayMetadata)
throws IOException, ZarrException {
super(storeHandle);
this.metadata = arrayMetadata;
- this.codecPipeline = new CodecPipeline(arrayMetadata.codecs);
+ this.codecPipeline = new CodecPipeline(arrayMetadata.codecs, arrayMetadata.coreArrayMetadata);
}
/**
@@ -171,8 +171,7 @@ public ucar.ma2.Array read(final long[] offset, final int[] shape) throws ZarrEx
if (codecPipeline.supportsPartialDecode()) {
final ucar.ma2.Array chunkArray = codecPipeline.decodePartial(chunkHandle,
- Utils.toLongArray(chunkProjection.chunkOffset), chunkProjection.shape,
- metadata.coreArrayMetadata);
+ Utils.toLongArray(chunkProjection.chunkOffset), chunkProjection.shape);
MultiArrayUtils.copyRegion(chunkArray, new int[metadata.ndim()], outputArray,
chunkProjection.outOffset, chunkProjection.shape
);
@@ -223,7 +222,7 @@ public ucar.ma2.Array readChunk(long[] chunkCoords)
return metadata.allocateFillValueChunk();
}
- return codecPipeline.decode(chunkBytes, metadata.coreArrayMetadata);
+ return codecPipeline.decode(chunkBytes);
}
/**
@@ -299,7 +298,7 @@ public void writeChunk(long[] chunkCoords, ucar.ma2.Array chunkArray) throws Zar
if (MultiArrayUtils.allValuesEqual(chunkArray, metadata.parsedFillValue)) {
chunkHandle.delete();
} else {
- ByteBuffer chunkBytes = codecPipeline.encode(chunkArray, metadata.coreArrayMetadata);
+ ByteBuffer chunkBytes = codecPipeline.encode(chunkArray);
chunkHandle.set(chunkBytes);
}
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayArrayCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayArrayCodec.java
index 868183a..a488d30 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayArrayCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayArrayCodec.java
@@ -1,15 +1,14 @@
package dev.zarr.zarrjava.v3.codec;
import dev.zarr.zarrjava.ZarrException;
-import dev.zarr.zarrjava.v3.ArrayMetadata.CoreArrayMetadata;
import ucar.ma2.Array;
-public interface ArrayArrayCodec extends Codec {
+public abstract class ArrayArrayCodec extends Codec {
- Array encode(Array chunkArray, CoreArrayMetadata arrayMetadata)
+ protected abstract Array encode(Array chunkArray)
throws ZarrException;
- Array decode(Array chunkArray, CoreArrayMetadata arrayMetadata)
+ protected abstract Array decode(Array chunkArray)
throws ZarrException;
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayBytesCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayBytesCodec.java
index ad042ac..361ae61 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayBytesCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/ArrayBytesCodec.java
@@ -2,23 +2,24 @@
import dev.zarr.zarrjava.ZarrException;
import dev.zarr.zarrjava.store.StoreHandle;
-import dev.zarr.zarrjava.v3.ArrayMetadata.CoreArrayMetadata;
import java.nio.ByteBuffer;
import ucar.ma2.Array;
-public interface ArrayBytesCodec extends Codec {
+public abstract class ArrayBytesCodec extends Codec {
- ByteBuffer encode(Array chunkArray, CoreArrayMetadata arrayMetadata)
+ protected abstract ByteBuffer encode(Array chunkArray)
throws ZarrException;
- Array decode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata)
+ protected abstract Array decode(ByteBuffer chunkBytes)
throws ZarrException;
- interface WithPartialDecode extends ArrayBytesCodec {
+ public abstract static class WithPartialDecode extends ArrayBytesCodec {
- Array decodePartial(
- StoreHandle handle, long[] offset, int[] shape,
- CoreArrayMetadata arrayMetadata
+ public abstract Array decode(ByteBuffer shardBytes) throws ZarrException;
+ public abstract ByteBuffer encode(Array shardArray) throws ZarrException;
+
+ protected abstract Array decodePartial(
+ StoreHandle handle, long[] offset, int[] shape
) throws ZarrException;
}
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/BytesBytesCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/BytesBytesCodec.java
index d8ce47a..6435463 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/BytesBytesCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/BytesBytesCodec.java
@@ -1,15 +1,13 @@
package dev.zarr.zarrjava.v3.codec;
import dev.zarr.zarrjava.ZarrException;
-import dev.zarr.zarrjava.v3.ArrayMetadata.CoreArrayMetadata;
+
import java.nio.ByteBuffer;
-public interface BytesBytesCodec extends Codec {
+public abstract class BytesBytesCodec extends Codec {
- ByteBuffer encode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata)
- throws ZarrException;
+ protected abstract ByteBuffer encode(ByteBuffer chunkBytes) throws ZarrException;
- ByteBuffer decode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata)
- throws ZarrException;
+ public abstract ByteBuffer decode(ByteBuffer chunkBytes) throws ZarrException;
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/Codec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/Codec.java
index 6ce7687..988dd1d 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/Codec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/Codec.java
@@ -5,9 +5,22 @@
import dev.zarr.zarrjava.v3.ArrayMetadata;
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "name")
-public interface Codec {
+public abstract class Codec {
- long computeEncodedSize(long inputByteLength, ArrayMetadata.CoreArrayMetadata arrayMetadata)
- throws ZarrException;
+ protected ArrayMetadata.CoreArrayMetadata arrayMetadata;
+
+ protected ArrayMetadata.CoreArrayMetadata resolveArrayMetadata() throws ZarrException {
+ if (arrayMetadata == null) {
+ throw new ZarrException("arrayMetadata needs to get set in for every codec");
+ }
+ return this.arrayMetadata;
+ }
+
+ protected abstract long computeEncodedSize(long inputByteLength, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ throws ZarrException;
+
+ public void setCoreArrayMetadata(ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException{
+ this.arrayMetadata = arrayMetadata;
+ }
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/CodecBuilder.java b/src/main/java/dev/zarr/zarrjava/v3/codec/CodecBuilder.java
index 5e33d7c..3776a43 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/CodecBuilder.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/CodecBuilder.java
@@ -41,6 +41,9 @@ public CodecBuilder withBlosc(
}
public CodecBuilder withBlosc(String cname, String shuffle, int clevel, int blockSize) {
+ if (shuffle.equals("shuffle")){
+ shuffle = "byteshuffle";
+ }
return withBlosc(Blosc.Compressor.fromString(cname), Blosc.Shuffle.fromString(shuffle), clevel,
dataType.getByteCount(), blockSize
);
@@ -62,13 +65,9 @@ public CodecBuilder withBlosc() {
return withBlosc("zstd");
}
- public CodecBuilder withTranspose(String order) {
- try {
+ public CodecBuilder withTranspose(int[] order) {
codecs.add(new TransposeCodec(new TransposeCodec.Configuration(order)));
- } catch (ZarrException e) {
- throw new RuntimeException(e);
- }
- return this;
+ return this;
}
public CodecBuilder withBytes(Endian endian) {
@@ -113,9 +112,10 @@ public CodecBuilder withZstd(int clevel) {
public CodecBuilder withSharding(int[] chunkShape) {
try {
codecs.add(
- new ShardingIndexedCodec(new ShardingIndexedCodec.Configuration(chunkShape,
- new Codec[]{new BytesCodec(new Configuration(Endian.LITTLE))},
- new Codec[]{new BytesCodec(new Configuration(Endian.LITTLE)), new Crc32cCodec()})));
+ new ShardingIndexedCodec(new ShardingIndexedCodec.Configuration(chunkShape,
+ new Codec[]{new BytesCodec(new Configuration(Endian.LITTLE))},
+ new Codec[]{new BytesCodec(new Configuration(Endian.LITTLE)), new Crc32cCodec()},
+ "end")));
} catch (ZarrException e) {
throw new RuntimeException(e);
}
@@ -123,19 +123,29 @@ public CodecBuilder withSharding(int[] chunkShape) {
}
public CodecBuilder withSharding(int[] chunkShape,
- Function codecBuilder) {
+ Function codecBuilder) {
+ return withSharding(chunkShape, codecBuilder, "end");
+ }
+
+ public CodecBuilder withSharding(int[] chunkShape,
+ Function codecBuilder, String indexLocation) {
CodecBuilder nestedBuilder = new CodecBuilder(dataType);
try {
codecs.add(new ShardingIndexedCodec(
- new ShardingIndexedCodec.Configuration(chunkShape,
- codecBuilder.apply(nestedBuilder).build(),
- new Codec[]{new BytesCodec(Endian.LITTLE), new Crc32cCodec()})));
+ new ShardingIndexedCodec.Configuration(chunkShape,
+ codecBuilder.apply(nestedBuilder).build(),
+ new Codec[]{new BytesCodec(Endian.LITTLE), new Crc32cCodec()},
+ indexLocation)));
} catch (ZarrException e) {
throw new RuntimeException(e);
}
return this;
}
+ public CodecBuilder withCrc32c() {
+ codecs.add(new Crc32cCodec());
+ return this;
+ }
private void autoInsertBytesCodec() {
if (codecs.stream().noneMatch(c -> c instanceof ArrayBytesCodec)) {
Codec[] arrayArrayCodecs = codecs.stream().filter(c -> c instanceof ArrayArrayCodec)
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/CodecPipeline.java b/src/main/java/dev/zarr/zarrjava/v3/codec/CodecPipeline.java
index 9ece0f0..920a1f4 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/CodecPipeline.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/CodecPipeline.java
@@ -12,8 +12,10 @@ public class CodecPipeline {
@Nonnull
final Codec[] codecs;
+ public final CoreArrayMetadata arrayMetadata;
- public CodecPipeline(@Nonnull Codec[] codecs) throws ZarrException {
+ public CodecPipeline(@Nonnull Codec[] codecs, CoreArrayMetadata arrayMetadata) throws ZarrException {
+ this.arrayMetadata = arrayMetadata;
long arrayBytesCodecCount = Arrays.stream(codecs).filter(c -> c instanceof ArrayBytesCodec)
.count();
if (arrayBytesCodecCount != 1) {
@@ -21,6 +23,7 @@ public CodecPipeline(@Nonnull Codec[] codecs) throws ZarrException {
"Exactly 1 ArrayBytesCodec is required. Found " + arrayBytesCodecCount + ".");
}
Codec prevCodec = null;
+ CoreArrayMetadata codecArrayMetadata = arrayMetadata;
for (Codec codec : codecs) {
if (prevCodec != null) {
if (codec instanceof ArrayBytesCodec && prevCodec instanceof ArrayBytesCodec) {
@@ -44,6 +47,8 @@ public CodecPipeline(@Nonnull Codec[] codecs) throws ZarrException {
prevCodec.getClass() + "'.");
}
}
+ codec.setCoreArrayMetadata(codecArrayMetadata);
+ codecArrayMetadata = codec.resolveArrayMetadata();
prevCodec = codec;
}
@@ -79,15 +84,14 @@ public boolean supportsPartialDecode() {
@Nonnull
public Array decodePartial(
@Nonnull StoreHandle storeHandle,
- long[] offset, int[] shape,
- @Nonnull CoreArrayMetadata arrayMetadata
+ long[] offset, int[] shape
) throws ZarrException {
if (!supportsPartialDecode()) {
throw new ZarrException(
"Partial decode is not supported for these codecs. " + Arrays.toString(codecs));
}
Array chunkArray = ((ArrayBytesCodec.WithPartialDecode) getArrayBytesCodec()).decodePartial(
- storeHandle, offset, shape, arrayMetadata);
+ storeHandle, offset, shape);
if (chunkArray == null) {
throw new ZarrException("chunkArray is null. This is likely a bug in one of the codecs.");
}
@@ -96,8 +100,7 @@ public Array decodePartial(
@Nonnull
public Array decode(
- @Nonnull ByteBuffer chunkBytes,
- @Nonnull CoreArrayMetadata arrayMetadata
+ @Nonnull ByteBuffer chunkBytes
) throws ZarrException {
if (chunkBytes == null) {
throw new ZarrException("chunkBytes is null. Ohh nooo.");
@@ -106,7 +109,7 @@ public Array decode(
BytesBytesCodec[] bytesBytesCodecs = getBytesBytesCodecs();
for (int i = bytesBytesCodecs.length - 1; i >= 0; --i) {
BytesBytesCodec codec = bytesBytesCodecs[i];
- chunkBytes = codec.decode(chunkBytes, arrayMetadata);
+ chunkBytes = codec.decode(chunkBytes);
}
if (chunkBytes == null) {
@@ -114,7 +117,7 @@ public Array decode(
"chunkBytes is null. This is likely a bug in one of the codecs. " + Arrays.toString(
getBytesBytesCodecs()));
}
- Array chunkArray = getArrayBytesCodec().decode(chunkBytes, arrayMetadata);
+ Array chunkArray = getArrayBytesCodec().decode(chunkBytes);
if (chunkArray == null) {
throw new ZarrException("chunkArray is null. This is likely a bug in one of the codecs.");
}
@@ -122,7 +125,7 @@ public Array decode(
ArrayArrayCodec[] arrayArrayCodecs = getArrayArrayCodecs();
for (int i = arrayArrayCodecs.length - 1; i >= 0; --i) {
ArrayArrayCodec codec = arrayArrayCodecs[i];
- chunkArray = codec.decode(chunkArray, arrayMetadata);
+ chunkArray = codec.decode(chunkArray);
}
if (chunkArray == null) {
@@ -133,16 +136,16 @@ public Array decode(
@Nonnull
public ByteBuffer encode(
- @Nonnull Array chunkArray, @Nonnull CoreArrayMetadata arrayMetadata
+ @Nonnull Array chunkArray
) throws ZarrException {
for (ArrayArrayCodec codec : getArrayArrayCodecs()) {
- chunkArray = codec.encode(chunkArray, arrayMetadata);
+ chunkArray = codec.encode(chunkArray);
}
- ByteBuffer chunkBytes = getArrayBytesCodec().encode(chunkArray, arrayMetadata);
+ ByteBuffer chunkBytes = getArrayBytesCodec().encode(chunkArray);
for (BytesBytesCodec codec : getBytesBytesCodecs()) {
- chunkBytes = codec.encode(chunkBytes, arrayMetadata);
+ chunkBytes = codec.encode(chunkBytes);
}
return chunkBytes;
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/BloscCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/BloscCodec.java
index 2c3412b..819a610 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/BloscCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/BloscCodec.java
@@ -20,7 +20,7 @@
import java.nio.ByteBuffer;
import javax.annotation.Nonnull;
-public class BloscCodec implements BytesBytesCodec {
+public class BloscCodec extends BytesBytesCodec {
public final String name = "blosc";
@Nonnull
@@ -33,7 +33,7 @@ public BloscCodec(
}
@Override
- public ByteBuffer decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ public ByteBuffer decode(ByteBuffer chunkBytes)
throws ZarrException {
try {
return ByteBuffer.wrap(Blosc.decompress(Utils.toArray(chunkBytes)));
@@ -43,7 +43,7 @@ public ByteBuffer decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata
}
@Override
- public ByteBuffer encode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ public ByteBuffer encode(ByteBuffer chunkBytes)
throws ZarrException {
try {
return ByteBuffer.wrap(
@@ -84,7 +84,7 @@ public void serialize(Blosc.Shuffle shuffle, JsonGenerator generator,
generator.writeString("bitshuffle");
break;
case BYTE_SHUFFLE:
- generator.writeString("byteshuffle");
+ generator.writeString("shuffle");
break;
}
}
@@ -154,7 +154,7 @@ public Blosc.Shuffle deserialize(JsonParser jsonParser, DeserializationContext c
return Blosc.Shuffle.NO_SHUFFLE;
case "bitshuffle":
return Blosc.Shuffle.BIT_SHUFFLE;
- case "byteshuffle":
+ case "shuffle":
return Blosc.Shuffle.BYTE_SHUFFLE;
default:
throw new JsonParseException(
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/BytesCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/BytesCodec.java
index de9d1e7..1415da9 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/BytesCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/BytesCodec.java
@@ -11,7 +11,7 @@
import javax.annotation.Nonnull;
import ucar.ma2.Array;
-public class BytesCodec implements ArrayBytesCodec {
+public class BytesCodec extends ArrayBytesCodec {
public final String name = "bytes";
@Nonnull
@@ -29,14 +29,14 @@ public BytesCodec(Endian endian) {
}
@Override
- public Array decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata) {
+ public Array decode(ByteBuffer chunkBytes) {
chunkBytes.order(configuration.endian.getByteOrder());
return Array.factory(arrayMetadata.dataType.getMA2DataType(), arrayMetadata.chunkShape,
chunkBytes);
}
@Override
- public ByteBuffer encode(Array chunkArray, ArrayMetadata.CoreArrayMetadata arrayMetadata) {
+ public ByteBuffer encode(Array chunkArray) {
return chunkArray.getDataAsByteBuffer(configuration.endian.getByteOrder());
}
@@ -72,7 +72,7 @@ public ByteOrder getByteOrder() {
}
}
- public static final class Configuration {
+ public static final class Configuration{
@Nonnull
public final BytesCodec.Endian endian;
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/Crc32cCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/Crc32cCodec.java
index 38be686..a1e3cb5 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/Crc32cCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/Crc32cCodec.java
@@ -9,17 +9,15 @@
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
-public class Crc32cCodec implements BytesBytesCodec {
+public class Crc32cCodec extends BytesBytesCodec {
public final String name = "crc32c";
@JsonCreator
- public Crc32cCodec(
- ) {
- }
+ public Crc32cCodec(){}
@Override
- public ByteBuffer decode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata)
+ public ByteBuffer decode(ByteBuffer chunkBytes)
throws ZarrException {
ByteBuffer buffer = chunkBytes.slice();
buffer.order(ByteOrder.LITTLE_ENDIAN);
@@ -45,7 +43,7 @@ public ByteBuffer decode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata)
}
@Override
- public ByteBuffer encode(ByteBuffer chunkBytes, CoreArrayMetadata arrayMetadata) {
+ public ByteBuffer encode(ByteBuffer chunkBytes) {
return Utils.makeByteBuffer(chunkBytes.capacity() + 4, b -> {
final CRC32C crc32c = new CRC32C();
crc32c.update(chunkBytes);
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/GzipCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/GzipCodec.java
index 1054542..3ff5acd 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/GzipCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/GzipCodec.java
@@ -16,7 +16,7 @@
import java.util.zip.GZIPOutputStream;
import javax.annotation.Nonnull;
-public class GzipCodec implements BytesBytesCodec {
+public class GzipCodec extends BytesBytesCodec {
public final String name = "gzip";
@Nonnull
@@ -37,7 +37,7 @@ private void copy(InputStream inputStream, OutputStream outputStream) throws IOE
}
@Override
- public ByteBuffer decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ public ByteBuffer decode(ByteBuffer chunkBytes)
throws ZarrException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); GZIPInputStream inputStream = new GZIPInputStream(
new ByteArrayInputStream(Utils.toArray(chunkBytes)))) {
@@ -50,7 +50,7 @@ public ByteBuffer decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata
}
@Override
- public ByteBuffer encode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ public ByteBuffer encode(ByteBuffer chunkBytes)
throws ZarrException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); GZIPOutputStream gzipStream = new GZIPOutputStream(
outputStream)) {
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/ShardingIndexedCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/ShardingIndexedCodec.java
index 75858a4..0dce734 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/ShardingIndexedCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/ShardingIndexedCodec.java
@@ -22,15 +22,13 @@
import ucar.ma2.InvalidRangeException;
-public class ShardingIndexedCodec implements ArrayBytesCodec, ArrayBytesCodec.WithPartialDecode {
+public class ShardingIndexedCodec extends ArrayBytesCodec.WithPartialDecode {
public final String name = "sharding_indexed";
@Nonnull
public final Configuration configuration;
- @Nonnull
- final CodecPipeline codecPipeline;
- @Nonnull
- final CodecPipeline indexCodecPipeline;
+ CodecPipeline codecPipeline;
+ CodecPipeline indexCodecPipeline;
@JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
public ShardingIndexedCodec(
@@ -38,8 +36,18 @@ public ShardingIndexedCodec(
Configuration configuration
) throws ZarrException {
this.configuration = configuration;
- this.codecPipeline = new CodecPipeline(configuration.codecs);
- this.indexCodecPipeline = new CodecPipeline(configuration.indexCodecs);
+ }
+
+ @Override
+ public void setCoreArrayMetadata(CoreArrayMetadata arrayMetadata) throws ZarrException {
+ super.setCoreArrayMetadata(arrayMetadata);
+ final ArrayMetadata.CoreArrayMetadata shardMetadata =
+ new ArrayMetadata.CoreArrayMetadata(Utils.toLongArray(arrayMetadata.chunkShape),
+ configuration.chunkShape, arrayMetadata.dataType,
+ arrayMetadata.parsedFillValue
+ );
+ this.codecPipeline = new CodecPipeline(configuration.codecs, shardMetadata);
+ this.indexCodecPipeline = new CodecPipeline(configuration.indexCodecs, getShardIndexArrayMetadata(getChunksPerShard(arrayMetadata)));
}
ArrayMetadata.CoreArrayMetadata getShardIndexArrayMetadata(int[] chunksPerShard) {
@@ -85,20 +93,15 @@ void setValueFromShardIndexArray(Array shardIndexArray, long[] chunkCoords, int
}
@Override
- public Array decode(ByteBuffer shardBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
+ public Array decode(ByteBuffer shardBytes)
throws ZarrException {
return decodeInternal(new ByteBufferDataProvider(shardBytes), new long[arrayMetadata.ndim()],
arrayMetadata.chunkShape, arrayMetadata);
}
@Override
- public ByteBuffer encode(final Array shardArray,
- final ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException {
- final ArrayMetadata.CoreArrayMetadata shardMetadata =
- new ArrayMetadata.CoreArrayMetadata(Utils.toLongArray(arrayMetadata.chunkShape),
- configuration.chunkShape, arrayMetadata.dataType,
- arrayMetadata.parsedFillValue
- );
+ public ByteBuffer encode(final Array shardArray) throws ZarrException {
+ final ArrayMetadata.CoreArrayMetadata shardMetadata = codecPipeline.arrayMetadata;
final int[] chunksPerShard = getChunksPerShard(arrayMetadata);
final int chunkCount = Arrays.stream(chunksPerShard)
.reduce(1, (r, a) -> r * a);
@@ -127,11 +130,14 @@ public ByteBuffer encode(final Array shardArray,
setValueFromShardIndexArray(shardIndexArray, chunkCoords, 0, -1);
setValueFromShardIndexArray(shardIndexArray, chunkCoords, 1, -1);
} else {
- final ByteBuffer chunkBytes = codecPipeline.encode(chunkArray, shardMetadata);
+ final ByteBuffer chunkBytes = codecPipeline.encode(chunkArray);
synchronized (chunkBytesList) {
int chunkByteOffset = chunkBytesList.stream()
- .mapToInt(ByteBuffer::capacity)
- .sum();
+ .mapToInt(ByteBuffer::capacity)
+ .sum();
+ if (configuration.indexLocation.equals("start")) {
+ chunkByteOffset += (int) getShardIndexSize(arrayMetadata);
+ }
setValueFromShardIndexArray(shardIndexArray, chunkCoords, 0, chunkByteOffset);
setValueFromShardIndexArray(shardIndexArray, chunkCoords, 1,
chunkBytes.capacity());
@@ -146,11 +152,15 @@ public ByteBuffer encode(final Array shardArray,
.mapToInt(ByteBuffer::capacity)
.sum() + (int) getShardIndexSize(arrayMetadata);
final ByteBuffer shardBytes = ByteBuffer.allocate(shardBytesLength);
+ if(configuration.indexLocation.equals("start")){
+ shardBytes.put(indexCodecPipeline.encode(shardIndexArray));
+ }
for (final ByteBuffer chunkBytes : chunkBytesList) {
shardBytes.put(chunkBytes);
}
- shardBytes.put(
- indexCodecPipeline.encode(shardIndexArray, getShardIndexArrayMetadata(chunksPerShard)));
+ if(configuration.indexLocation.equals("end")){
+ shardBytes.put(indexCodecPipeline.encode(shardIndexArray));
+ }
shardBytes.rewind();
return shardBytes;
}
@@ -172,25 +182,22 @@ private Array decodeInternal(
DataProvider dataProvider, long[] offset, int[] shape,
ArrayMetadata.CoreArrayMetadata arrayMetadata
) throws ZarrException {
+ final ArrayMetadata.CoreArrayMetadata shardMetadata = codecPipeline.arrayMetadata;
+
final Array outputArray = Array.factory(arrayMetadata.dataType.getMA2DataType(), shape);
- final int[] chunksPerShard = getChunksPerShard(arrayMetadata);
final int shardIndexByteLength = (int) getShardIndexSize(arrayMetadata);
- ByteBuffer shardIndexBytes = dataProvider.readSuffix(shardIndexByteLength);
-
+ ByteBuffer shardIndexBytes;
+ if (this.configuration.indexLocation.equals("start")) {
+ shardIndexBytes = dataProvider.readPrefix(shardIndexByteLength);
+ }else if(this.configuration.indexLocation.equals("end")){
+ shardIndexBytes = dataProvider.readSuffix(shardIndexByteLength);
+ }else{
+ throw new ZarrException("Only index_location \"start\" or \"end\" are supported.");
+ }
if (shardIndexBytes == null) {
throw new ZarrException("Could not read shard index.");
}
- final Array shardIndexArray = indexCodecPipeline.decode(
- shardIndexBytes,
- getShardIndexArrayMetadata(chunksPerShard)
- );
-
- final ArrayMetadata.CoreArrayMetadata shardMetadata =
- new ArrayMetadata.CoreArrayMetadata(Utils.toLongArray(arrayMetadata.chunkShape),
- configuration.chunkShape, arrayMetadata.dataType,
- arrayMetadata.parsedFillValue
- );
-
+ final Array shardIndexArray = indexCodecPipeline.decode(shardIndexBytes);
long[][] allChunkCoords = IndexingUtils.computeChunkCoords(shardMetadata.shape,
shardMetadata.chunkShape, offset,
shape);
@@ -215,7 +222,7 @@ private Array decodeInternal(
throw new ZarrException(String.format("Could not load byte data for chunk %s",
Arrays.toString(chunkCoords)));
}
- chunkArray = codecPipeline.decode(chunkBytes, shardMetadata);
+ chunkArray = codecPipeline.decode(chunkBytes);
}
if (chunkArray == null) {
chunkArray = shardMetadata.allocateFillValueChunk();
@@ -232,17 +239,13 @@ private Array decodeInternal(
}
@Override
- public Array decodePartial(
- StoreHandle chunkHandle, long[] offset, int[] shape,
- ArrayMetadata.CoreArrayMetadata arrayMetadata
- ) throws ZarrException {
+ public Array decodePartial(StoreHandle chunkHandle, long[] offset, int[] shape) throws ZarrException {
if (Arrays.equals(shape, arrayMetadata.chunkShape)) {
ByteBuffer chunkBytes = chunkHandle.read();
if (chunkBytes == null) {
return arrayMetadata.allocateFillValueChunk();
}
- return decodeInternal(new ByteBufferDataProvider(chunkHandle.read()), offset, shape,
- arrayMetadata);
+ return decodeInternal(new ByteBufferDataProvider(chunkHandle.read()), offset, shape, arrayMetadata);
}
return decodeInternal(new StoreHandleDataProvider(chunkHandle), offset, shape, arrayMetadata);
}
@@ -253,6 +256,8 @@ interface DataProvider {
ByteBuffer read(long start, long length);
ByteBuffer readSuffix(long suffixLength);
+
+ ByteBuffer readPrefix(long prefixLength);
}
public static final class Configuration {
@@ -260,19 +265,32 @@ public static final class Configuration {
@JsonProperty("chunk_shape")
public final int[] chunkShape;
@Nonnull
+ @JsonProperty("codecs")
public final Codec[] codecs;
@Nonnull
+ @JsonProperty("index_codecs")
public final Codec[] indexCodecs;
+ @Nonnull
+ @JsonProperty("index_location")
+ public String indexLocation;
@JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
public Configuration(
- @JsonProperty(value = "chunk_shape", required = true) int[] chunkShape,
- @Nonnull @JsonProperty("codecs") Codec[] codecs,
- @Nonnull @JsonProperty("index_codecs") Codec[] indexCodecs
- ) {
+ @JsonProperty(value = "chunk_shape", required = true) int[] chunkShape,
+ @Nonnull @JsonProperty("codecs") Codec[] codecs,
+ @Nonnull @JsonProperty("index_codecs") Codec[] indexCodecs,
+ @JsonProperty(value = "index_location", defaultValue = "end") String indexLocation
+ ) throws ZarrException {
+ if (indexLocation == null) {
+ indexLocation = "end";
+ }
+ if (!indexLocation.equals("start") && !indexLocation.equals("end")) {
+ throw new ZarrException("Only index_location \"start\" or \"end\" are supported.");
+ }
this.chunkShape = chunkShape;
this.codecs = codecs;
this.indexCodecs = indexCodecs;
+ this.indexLocation = indexLocation;
}
}
@@ -293,6 +311,12 @@ public ByteBuffer readSuffix(long suffixLength) {
return bufferSlice.slice();
}
+ public ByteBuffer readPrefix(long prefixLength) {
+ ByteBuffer bufferSlice = buffer.slice();
+ bufferSlice.limit((int) (prefixLength));
+ return bufferSlice.slice();
+ }
+
@Override
public ByteBuffer read(long start, long length) {
ByteBuffer bufferSlice = buffer.slice();
@@ -317,6 +341,11 @@ public ByteBuffer readSuffix(long suffixLength) {
return storeHandle.read(-suffixLength);
}
+ @Override
+ public ByteBuffer readPrefix(long prefixLength) {
+ return storeHandle.read(0, prefixLength);
+ }
+
@Override
public ByteBuffer read(long start, long length) {
return storeHandle.read(start, start + length);
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/TransposeCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/TransposeCodec.java
index 69fb6fe..4d614ae 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/TransposeCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/TransposeCodec.java
@@ -5,64 +5,87 @@
import dev.zarr.zarrjava.ZarrException;
import dev.zarr.zarrjava.v3.ArrayMetadata;
import dev.zarr.zarrjava.v3.codec.ArrayArrayCodec;
-import javax.annotation.Nonnull;
import ucar.ma2.Array;
-public class TransposeCodec implements ArrayArrayCodec {
+import javax.annotation.Nonnull;
+import java.util.Arrays;
+
+import static dev.zarr.zarrjava.utils.Utils.inversePermutation;
+import static dev.zarr.zarrjava.utils.Utils.isPermutation;
- @Nonnull
- public final String name = "transpose";
- @Nonnull
- public final Configuration configuration;
+public class TransposeCodec extends ArrayArrayCodec {
- @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
- public TransposeCodec(
- @Nonnull @JsonProperty(value = "configuration", required = true) Configuration configuration
- ) {
- this.configuration = configuration;
- }
+ @Nonnull
+ public final String name = "transpose";
+ @Nonnull
+ public final Configuration configuration;
- int[] reverseDims(int ndim) {
- int[] dims = new int[ndim];
- for (int dimIdx = 0; dimIdx < ndim; dimIdx++) {
- dims[dimIdx] = ndim - dimIdx - 1;
+ @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
+ public TransposeCodec(
+ @Nonnull @JsonProperty(value = "configuration", required = true) Configuration configuration
+ ) {
+ this.configuration = configuration;
}
- return dims;
- }
- @Override
- public Array decode(Array chunkArray, ArrayMetadata.CoreArrayMetadata arrayMetadata) {
- if (configuration.order.equals("F")) {
- chunkArray.permute(reverseDims(arrayMetadata.ndim()));
+
+ @Override
+ public Array decode(Array chunkArray) throws ZarrException {
+ if (!isPermutation(configuration.order)){
+ throw new ZarrException("Order is no permutation array");
+ }
+ if (arrayMetadata.ndim() != configuration.order.length) {
+ throw new ZarrException("Array has not the same ndim as transpose codec order");
+ }
+ chunkArray = chunkArray.permute(inversePermutation(configuration.order));
+ return chunkArray;
}
- return chunkArray;
- }
- @Override
- public Array encode(Array chunkArray, ArrayMetadata.CoreArrayMetadata arrayMetadata) {
- if (configuration.order.equals("F")) {
- chunkArray.permute(reverseDims(arrayMetadata.ndim()));
+
+
+ @Override
+ public Array encode(Array chunkArray) throws ZarrException {
+ if (!isPermutation(configuration.order)){
+ throw new ZarrException("Order is no permutation array");
+ }
+ if (arrayMetadata.ndim() != configuration.order.length) {
+ throw new ZarrException("Array has not the same ndim as transpose codec order");
+ }
+ chunkArray = chunkArray.permute(configuration.order);
+ return chunkArray;
}
- return chunkArray;
- }
- @Override
- public long computeEncodedSize(long inputByteLength,
- ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException {
- return inputByteLength;
- }
+ @Override
+ public long computeEncodedSize(long inputByteLength,
+ ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException {
+ return inputByteLength;
+ }
- public static final class Configuration {
+ public static final class Configuration {
+ public final int[] order;
- public final String order;
+ @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
+ public Configuration(@JsonProperty(value = "order") int[] order) {
+ this.order = order;
+ }
+ }
- @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
- public Configuration(@JsonProperty(value = "order", defaultValue = "C") String order)
- throws ZarrException {
- if (!order.equals("C") && !order.equals("F")) {
- throw new ZarrException("Only 'C' or 'F' are supported.");
- }
- this.order = order;
+ @Override
+ protected ArrayMetadata.CoreArrayMetadata resolveArrayMetadata() throws ZarrException {
+ super.resolveArrayMetadata();
+ assert arrayMetadata.ndim() == configuration.order.length;
+
+ int[] transposedChunkShape = new int[arrayMetadata.ndim()];
+ Arrays.setAll(transposedChunkShape, i -> arrayMetadata.chunkShape[configuration.order[i]]);
+
+ //only the chunk shape gets transposed; the number of chunks per dimension stays the same
+ long[] transposedArrayShape = new long[arrayMetadata.ndim()];
+ Arrays.setAll(transposedArrayShape, i -> arrayMetadata.shape[i] / arrayMetadata.chunkShape[i] * transposedChunkShape[i]);
+
+ return new ArrayMetadata.CoreArrayMetadata(
+ transposedArrayShape,
+ transposedChunkShape,
+ arrayMetadata.dataType,
+ arrayMetadata.parsedFillValue
+ );
}
- }
}
diff --git a/src/main/java/dev/zarr/zarrjava/v3/codec/core/ZstdCodec.java b/src/main/java/dev/zarr/zarrjava/v3/codec/core/ZstdCodec.java
index 39a69ee..f042f11 100644
--- a/src/main/java/dev/zarr/zarrjava/v3/codec/core/ZstdCodec.java
+++ b/src/main/java/dev/zarr/zarrjava/v3/codec/core/ZstdCodec.java
@@ -2,89 +2,74 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.github.luben.zstd.ZstdInputStream;
-import com.github.luben.zstd.ZstdOutputStream;
+import com.github.luben.zstd.Zstd;
+import com.github.luben.zstd.ZstdCompressCtx;
import dev.zarr.zarrjava.ZarrException;
-import dev.zarr.zarrjava.utils.Utils;
import dev.zarr.zarrjava.v3.ArrayMetadata;
import dev.zarr.zarrjava.v3.codec.BytesBytesCodec;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import javax.annotation.Nonnull;
-public class ZstdCodec implements BytesBytesCodec {
+import javax.annotation.Nonnull;
+import java.nio.ByteBuffer;
- public final String name = "zstd";
- @Nonnull
- public final Configuration configuration;
+public class ZstdCodec extends BytesBytesCodec {
- @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
- public ZstdCodec(
- @Nonnull @JsonProperty(value = "configuration", required = true) Configuration configuration) {
- this.configuration = configuration;
- }
+ public final String name = "zstd";
+ @Nonnull
+ public final Configuration configuration;
- private void copy(InputStream inputStream, OutputStream outputStream) throws IOException {
- byte[] buffer = new byte[4096];
- int len;
- while ((len = inputStream.read(buffer)) > 0) {
- outputStream.write(buffer, 0, len);
+ @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
+ public ZstdCodec(
+ @Nonnull @JsonProperty(value = "configuration", required = true) Configuration configuration) {
+ this.configuration = configuration;
}
- }
- @Override
- public ByteBuffer decode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
- throws ZarrException {
- try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ZstdInputStream inputStream = new ZstdInputStream(
- new ByteArrayInputStream(Utils.toArray(chunkBytes)))) {
- copy(inputStream, outputStream);
- inputStream.close();
- return ByteBuffer.wrap(outputStream.toByteArray());
- } catch (IOException ex) {
- throw new ZarrException("Error in decoding zstd.", ex);
+ @Override
+ public ByteBuffer decode(ByteBuffer compressedBytes) throws ZarrException {
+ byte[] compressedArray = compressedBytes.array();
+
+ long originalSize = Zstd.decompressedSize(compressedArray);
+ if (originalSize == 0) {
+ throw new ZarrException("Failed to get decompressed size");
+ }
+
+ byte[] decompressed = Zstd.decompress(compressedArray, (int) originalSize);
+ return ByteBuffer.wrap(decompressed);
}
- }
- @Override
- public ByteBuffer encode(ByteBuffer chunkBytes, ArrayMetadata.CoreArrayMetadata arrayMetadata)
- throws ZarrException {
- try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ZstdOutputStream zstdStream = new ZstdOutputStream(
- outputStream, configuration.level).setChecksum(
- configuration.checksum)) {
- zstdStream.write(Utils.toArray(chunkBytes));
- zstdStream.close();
- return ByteBuffer.wrap(outputStream.toByteArray());
- } catch (IOException ex) {
- throw new ZarrException("Error in decoding zstd.", ex);
+ @Override
+ public ByteBuffer encode(ByteBuffer chunkBytes) throws ZarrException {
+ byte[] arr = chunkBytes.array();
+ byte[] compressed;
+ try (ZstdCompressCtx ctx = new ZstdCompressCtx()) {
+ ctx.setLevel(configuration.level);
+ ctx.setChecksum(configuration.checksum);
+ compressed = ctx.compress(arr);
+ }
+ return ByteBuffer.wrap(compressed);
}
- }
- @Override
- public long computeEncodedSize(long inputByteLength,
- ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException {
- throw new ZarrException("Not implemented for Zstd codec.");
- }
+ @Override
+ public long computeEncodedSize(long inputByteLength,
+ ArrayMetadata.CoreArrayMetadata arrayMetadata) throws ZarrException {
+ throw new ZarrException("Not implemented for Zstd codec.");
+ }
- public static final class Configuration {
+ public static final class Configuration {
- public final int level;
- public final boolean checksum;
+ public final int level;
+ public final boolean checksum;
- @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
- public Configuration(@JsonProperty(value = "level", defaultValue = "5") int level,
- @JsonProperty(value = "checksum", defaultValue = "true") boolean checksum)
- throws ZarrException {
- if (level < -131072 || level > 22) {
- throw new ZarrException("'level' needs to be between -131072 and 22.");
- }
- this.level = level;
- this.checksum = checksum;
+ @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
+ public Configuration(@JsonProperty(value = "level", defaultValue = "5") int level,
+ @JsonProperty(value = "checksum", defaultValue = "true") boolean checksum)
+ throws ZarrException {
+ if (level < -131072 || level > 22) {
+ throw new ZarrException("'level' needs to be between -131072 and 22.");
+ }
+ this.level = level;
+ this.checksum = checksum;
+ }
}
- }
}
diff --git a/src/test/java/dev/zarr/zarrjava/TestUtils.java b/src/test/java/dev/zarr/zarrjava/TestUtils.java
new file mode 100644
index 0000000..8165b4a
--- /dev/null
+++ b/src/test/java/dev/zarr/zarrjava/TestUtils.java
@@ -0,0 +1,32 @@
+package dev.zarr.zarrjava;
+
+
+import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+
+import java.util.Arrays;
+
+import static dev.zarr.zarrjava.utils.Utils.inversePermutation;
+import static dev.zarr.zarrjava.utils.Utils.isPermutation;
+
+public class TestUtils {
+ @Test
+ public void testIsPermutation(){
+ assert isPermutation(new int[]{2, 1, 0});
+ assert isPermutation(new int[]{4, 2, 1, 3, 0});
+ assert !isPermutation(new int[]{0, 1, 2, 0});
+ assert !isPermutation(new int[]{0, 1, 2, 3, 5});
+ assert !isPermutation(new int[]{});
+ }
+
+ @Test
+ public void testInversePermutation(){
+ Assertions.assertArrayEquals(new int[]{1, 0, 2}, inversePermutation(new int[]{1, 0, 2}));
+ Assertions.assertArrayEquals(new int[]{2, 0, 1}, inversePermutation(new int[]{1, 2, 0}));
+ Assertions.assertArrayEquals(new int[]{0, 3, 2, 4, 1}, inversePermutation(new int[]{0, 4, 2, 1, 3}));
+ Assertions.assertFalse(Arrays.equals(new int[]{2, 0, 1}, inversePermutation(new int[]{2, 0, 1})));
+ }
+
+}
+
diff --git a/src/test/java/dev/zarr/zarrjava/ZarrTest.java b/src/test/java/dev/zarr/zarrjava/ZarrTest.java
index a529bee..b9632be 100644
--- a/src/test/java/dev/zarr/zarrjava/ZarrTest.java
+++ b/src/test/java/dev/zarr/zarrjava/ZarrTest.java
@@ -1,217 +1,449 @@
package dev.zarr.zarrjava;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.luben.zstd.Zstd;
+import com.github.luben.zstd.ZstdCompressCtx;
import dev.zarr.zarrjava.store.FilesystemStore;
import dev.zarr.zarrjava.store.HttpStore;
import dev.zarr.zarrjava.store.S3Store;
+import dev.zarr.zarrjava.store.StoreHandle;
import dev.zarr.zarrjava.utils.MultiArrayUtils;
-import dev.zarr.zarrjava.v3.Array;
-import dev.zarr.zarrjava.v3.ArrayMetadata;
-import dev.zarr.zarrjava.v3.DataType;
-import dev.zarr.zarrjava.v3.Group;
-import dev.zarr.zarrjava.v3.GroupMetadata;
-import dev.zarr.zarrjava.v3.Node;
-import java.io.File;
-import java.io.IOException;
+import dev.zarr.zarrjava.v3.*;
+import dev.zarr.zarrjava.v3.codec.CodecBuilder;
+import dev.zarr.zarrjava.v3.codec.core.TransposeCodec;
+import jdk.jshell.spi.ExecutionControl;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.ValueSource;
+import ucar.ma2.MAMath;
+
+import java.io.*;
+import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Comparator;
-import java.util.HashMap;
+import java.util.Map;
import java.util.stream.Stream;
-import org.junit.Before;
-import org.junit.Test;
+
+import static org.junit.Assert.assertThrows;
public class ZarrTest {
- final Path TESTDATA = Paths.get("testdata");
- final Path TESTOUTPUT = Paths.get("testoutput");
-
- @Before
- public void clearTestoutputFolder() throws IOException {
- if (Files.exists(TESTOUTPUT)) {
- try (Stream walk = Files.walk(TESTOUTPUT)) {
- walk.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
- }
- }
- Files.createDirectory(TESTOUTPUT);
- }
-
- @Test
- public void testFileSystemStores() throws IOException, ZarrException {
- FilesystemStore fsStore = new FilesystemStore(TESTDATA);
- ObjectMapper objectMapper = Node.makeObjectMapper();
-
- GroupMetadata group = objectMapper.readValue(
- Files.readAllBytes(TESTDATA.resolve("l4_sample").resolve("zarr.json")),
- GroupMetadata.class
- );
-
- System.out.println(group);
- System.out.println(objectMapper.writeValueAsString(group));
-
- ArrayMetadata arrayMetadata = objectMapper.readValue(Files.readAllBytes(TESTDATA.resolve(
- "l4_sample").resolve("color").resolve("1").resolve("zarr.json")),
- ArrayMetadata.class);
-
- System.out.println(arrayMetadata);
- System.out.println(objectMapper.writeValueAsString(arrayMetadata));
-
- System.out.println(
- Array.open(fsStore.resolve("l4_sample", "color", "1")));
- System.out.println(
- Arrays.toString(Group.open(fsStore.resolve("l4_sample")).list().toArray(Node[]::new)));
- System.out.println(
- Arrays.toString(((Group) Group.open(fsStore.resolve("l4_sample")).get("color")).list()
- .toArray(Node[]::new)));
- }
-
- @Test
- public void testS3Store() throws IOException, ZarrException {
- S3Store s3Store = new S3Store(AmazonS3ClientBuilder.standard()
- .withRegion("eu-west-1")
- .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
- .build(), "static.webknossos.org", "data");
- System.out.println(Array.open(s3Store.resolve("zarr_v3", "l4_sample", "color", "1")));
- }
-
- @Test
- public void testHttpStore() throws IOException, ZarrException {
- HttpStore httpStore = new HttpStore("https://static.webknossos.org/data/");
- System.out.println(
- dev.zarr.zarrjava.v2.Array.open(httpStore.resolve("l4_sample", "color", "1")));
- System.out.println(Array.open(httpStore.resolve("zarr_v3", "l4_sample", "color", "1")));
- }
-
- @Test
- public void testV3ShardingReadCutout() throws IOException, ZarrException {
- Array array = Array.open(new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "1"));
-
- ucar.ma2.Array outArray = array.read(new long[]{0, 3073, 3073, 513}, new int[]{1, 64, 64, 64});
- assertEquals(outArray.getSize(), 64 * 64 * 64);
- assertEquals(outArray.getByte(0), -98);
- }
-
- @Test
- public void testV3Access() throws IOException, ZarrException {
- Array readArray = Array.open(new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "1"));
-
- ucar.ma2.Array outArray = readArray.access().withOffset(0, 3073, 3073, 513)
- .withShape(1, 64, 64, 64)
- .read();
- assertEquals(outArray.getSize(), 64 * 64 * 64);
- assertEquals(outArray.getByte(0), -98);
-
- Array writeArray = Array.create(
- new FilesystemStore(TESTOUTPUT).resolve("l4_sample_2", "color", "1"),
- readArray.metadata
- );
- writeArray.access().withOffset(0, 3073, 3073, 513).write(outArray);
- }
-
- @Test
- public void testV3ShardingReadWrite() throws IOException, ZarrException {
- Array readArray = Array.open(
- new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "8-8-2"));
- ucar.ma2.Array readArrayContent = readArray.read();
- Array writeArray = Array.create(
- new FilesystemStore(TESTOUTPUT).resolve("l4_sample_3", "color", "8-8-2"),
- readArray.metadata
- );
- writeArray.write(readArrayContent);
- ucar.ma2.Array outArray = writeArray.read();
-
- assert MultiArrayUtils.allValuesEqual(outArray, readArrayContent);
- }
-
- @Test
- public void testV3Codecs() throws IOException, ZarrException {
- Array readArray = Array.open(
- new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "8-8-2"));
- ucar.ma2.Array readArrayContent = readArray.read();
- {
- Array gzipArray = Array.create(
- new FilesystemStore(TESTOUTPUT).resolve("l4_sample_gzip", "color", "8-8-2"),
- Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withGzip(5)).build()
- );
- gzipArray.write(readArrayContent);
- ucar.ma2.Array outGzipArray = gzipArray.read();
- assert MultiArrayUtils.allValuesEqual(outGzipArray, readArrayContent);
- }
- {
- Array bloscArray = Array.create(
- new FilesystemStore(TESTOUTPUT).resolve("l4_sample_blosc", "color", "8-8-2"),
- Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withBlosc("zstd", 5)).build()
- );
- bloscArray.write(readArrayContent);
- ucar.ma2.Array outBloscArray = bloscArray.read();
- assert MultiArrayUtils.allValuesEqual(outBloscArray, readArrayContent);
- }
- {
- Array zstdArray = Array.create(
- new FilesystemStore(TESTOUTPUT).resolve("l4_sample_zstd", "color", "8-8-2"),
- Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withZstd(10)).build()
- );
- zstdArray.write(readArrayContent);
- ucar.ma2.Array outZstdArray = zstdArray.read();
- assert MultiArrayUtils.allValuesEqual(outZstdArray, readArrayContent);
- }
- }
-
- @Test
- public void testV3ArrayMetadataBuilder() throws ZarrException {
- Array.metadataBuilder()
- .withShape(1, 4096, 4096, 1536)
- .withDataType(DataType.UINT32)
- .withChunkShape(1, 1024, 1024, 1024)
- .withFillValue(0)
- .withCodecs(
- c -> c.withSharding(new int[]{1, 32, 32, 32}, c1 -> c1.withBlosc()))
- .build();
- }
-
- @Test
- public void testV3FillValue() throws ZarrException {
- assertEquals((int) ArrayMetadata.parseFillValue(0, DataType.UINT32), 0);
- assertEquals((int) ArrayMetadata.parseFillValue("0x00010203", DataType.UINT32), 50462976);
- assertEquals((byte) ArrayMetadata.parseFillValue("0b00000010", DataType.UINT8), 2);
- assert Double.isNaN((double) ArrayMetadata.parseFillValue("NaN", DataType.FLOAT64));
- assert Double.isInfinite((double) ArrayMetadata.parseFillValue("-Infinity", DataType.FLOAT64));
- }
-
- @Test
- public void testV3Group() throws IOException, ZarrException {
- FilesystemStore fsStore = new FilesystemStore(TESTOUTPUT);
-
- Group group = Group.create(fsStore.resolve("testgroup"));
- Group group2 = group.createGroup("test2", new HashMap() {{
- put("hello", "world");
- }});
- Array array = group2.createArray("array", b ->
- b.withShape(10, 10)
- .withDataType(DataType.UINT8)
- .withChunkShape(5, 5)
- );
- array.write(new long[]{2, 2}, ucar.ma2.Array.factory(ucar.ma2.DataType.UBYTE, new int[]{8, 8}));
-
- assertArrayEquals(
- ((Array) ((Group) group.listAsArray()[0]).listAsArray()[0]).metadata.chunkShape(),
- new int[]{5, 5});
- }
-
- @Test
- public void testV2() throws IOException, ZarrException {
- FilesystemStore fsStore = new FilesystemStore("");
- HttpStore httpStore = new HttpStore("https://static.webknossos.org/data");
-
- System.out.println(
- dev.zarr.zarrjava.v2.Array.open(httpStore.resolve("l4_sample", "color", "1")));
- }
+ final static Path TESTDATA = Paths.get("testdata");
+ final static Path TESTOUTPUT = Paths.get("testoutput");
+ final static Path PYTHON_TEST_PATH = Paths.get("src/test/python-scripts/");
+
+ public static String pythonPath() {
+ if (System.getProperty("os.name").startsWith("Windows")) {
+ return "venv_zarrita\\Scripts\\python.exe";
+ }
+ return "venv_zarrita/bin/python";
+ }
+
+ @BeforeAll
+ public static void clearTestoutputFolder() throws IOException {
+ if (Files.exists(TESTOUTPUT)) {
+ try (Stream<Path> walk = Files.walk(TESTOUTPUT)) {
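+ // delete children before their parent directories by walking in reverse order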
+ walk.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
+ }
+ }
+ Files.createDirectory(TESTOUTPUT);
+ }
+
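+ // rows are codec,codecParam pairs; parameters are packed with underscores and "_" marks codecs that take none (see zarrita_write.py)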
+ @ParameterizedTest
+ @CsvSource({
+ "blosc,blosclz_noshuffle_0", "blosc,lz4_shuffle_6", "blosc,lz4hc_bitshuffle_3", "blosc,zlib_shuffle_5", "blosc,zstd_bitshuffle_9",
+ "gzip,0", "gzip,5",
+ "zstd,0_true", "zstd,5_true","zstd,0_false", "zstd,5_false",
+ "bytes,BIG", "bytes,LITTLE",
+ "transpose,_",
+ "sharding,start", "sharding,end",
+ "sharding_nested,_",
+ "crc32c,_",
+ })
+ public void testReadFromZarrita(String codec, String codecParam) throws IOException, ZarrException, InterruptedException {
+ String command = pythonPath();
+ ProcessBuilder pb = new ProcessBuilder(command, PYTHON_TEST_PATH.resolve("zarrita_write.py").toString(), codec, codecParam, TESTOUTPUT.toString());
+ Process process = pb.start();
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
+ String line;
+ while ((line = reader.readLine()) != null) {
+ System.out.println(line);
+ }
+
+ BufferedReader readerErr = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+ while ((line = readerErr.readLine()) != null) {
+ System.err.println(line);
+ }
+
+ int exitCode = process.waitFor();
+ assert exitCode == 0;
+
+ Array array = Array.open(new FilesystemStore(TESTOUTPUT).resolve("read_from_zarrita", codec, codecParam));
+ ucar.ma2.Array result = array.read();
+
+ //for expected values see zarrita_write.py
+ Assertions.assertArrayEquals(new int[]{16, 16}, result.getShape());
+ Assertions.assertEquals(DataType.INT32, array.metadata.dataType);
+ Assertions.assertArrayEquals(new int[]{2, 8}, array.metadata.chunkShape());
+ Assertions.assertEquals(42, array.metadata.attributes.get("answer"));
+
+ int[] expectedData = new int[16 * 16];
+ Arrays.setAll(expectedData, p -> p);
+ Assertions.assertArrayEquals(expectedData, (int[]) result.get1DJavaArray(ucar.ma2.DataType.INT));
+ }
+
+ @CsvSource({"0,true", "0,false", "5, true", "10, false"})
+ @ParameterizedTest
+ public void testZstdLibrary(int clevel, boolean checksumFlag) throws IOException, InterruptedException {
+ //compress using ZstdCompressCtx
+ int number = 123456;
+ byte[] src = ByteBuffer.allocate(4).putInt(number).array();
+ byte[] compressed;
+ try (ZstdCompressCtx ctx = new ZstdCompressCtx()) {
+ ctx.setLevel(clevel);
+ ctx.setChecksum(checksumFlag);
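+ // the checksum flag appends a frame checksum that is verified during decompression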
+ compressed = ctx.compress(src);
+ }
+ //decompress with Zstd.decompress
+ long originalSize = Zstd.decompressedSize(compressed);
+ byte[] decompressed = Zstd.decompress(compressed, (int) originalSize);
+ Assertions.assertEquals(number, ByteBuffer.wrap(decompressed).getInt());
+
+ //write compressed to file
+ String compressedDataPath = TESTOUTPUT.resolve("compressed" + clevel + checksumFlag + ".bin").toString();
+ try (FileOutputStream fos = new FileOutputStream(compressedDataPath)) {
+ fos.write(compressed);
+ }
+
+ //decompress in python
+ Process process = new ProcessBuilder(
+ pythonPath(),
+ PYTHON_TEST_PATH.resolve("zstd_decompress.py").toString(),
+ compressedDataPath,
+ Integer.toString(number)
+ ).start();
+ int exitCode = process.waitFor();
+ assert exitCode == 0;
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "blosc,blosclz_noshuffle_0", "blosc,lz4_shuffle_6", "blosc,lz4hc_bitshuffle_3", "blosc,zlib_shuffle_5", "blosc,zstd_bitshuffle_9",
+ "gzip,0", "gzip,5",
+ "zstd,0_true", "zstd,5_true","zstd,0_false", "zstd,5_false",
+ "bytes,BIG", "bytes,LITTLE",
+ "transpose,_",
+ "sharding,start", "sharding,end",
+ "sharding_nested,_",
+ "crc32c,_",
+ })
+ public void testWriteReadWithZarrita(String codec, String codecParam) throws Exception {
+ int[] testData = new int[16 * 16 * 16];
+ Arrays.setAll(testData, p -> p);
+
+ StoreHandle storeHandle = new FilesystemStore(TESTOUTPUT).resolve("write_to_zarrita", codec, codecParam);
+ ArrayMetadataBuilder builder = Array.metadataBuilder()
+ .withShape(16, 16, 16)
+ .withDataType(DataType.UINT32)
+ .withChunkShape(2, 4, 8)
+ .withFillValue(0)
+ .withAttributes(Map.of("test_key", "test_value"));
+
+ switch (codec) {
+ case "blosc":
+ String cname = codecParam.split("_")[0];
+ String shuffle = codecParam.split("_")[1];
+ int clevel_blosc = Integer.parseInt(codecParam.split("_")[2]);
+ builder = builder.withCodecs(c -> c.withBlosc(cname, shuffle, clevel_blosc));
+ break;
+ case "gzip":
+ builder = builder.withCodecs(c -> c.withGzip(Integer.parseInt(codecParam)));
+ break;
+ case "zstd":
+ int clevel_zstd = Integer.parseInt(codecParam.split("_")[0]);
+ boolean checksum = Boolean.parseBoolean(codecParam.split("_")[1]);
+ builder = builder.withCodecs(c -> c.withZstd(clevel_zstd, checksum));
+ break;
+ case "bytes":
+ builder = builder.withCodecs(c -> c.withBytes(codecParam));
+ break;
+ case "transpose":
+ builder = builder.withCodecs(c -> c.withTranspose(new int[]{1, 0, 2}));
+ break;
+ case "sharding":
+ builder = builder.withCodecs(c -> c.withSharding(new int[]{2, 2, 4}, c1 -> c1.withBytes("LITTLE"), codecParam));
+ break;
+ case "sharding_nested":
+ builder = builder.withCodecs(c -> c.withSharding(new int[]{2, 2, 4}, c1 -> c1.withSharding(new int[]{2, 1, 2}, c2 -> c2.withBytes("LITTLE"))));
+ break;
+ case "crc32c":
+ builder = builder.withCodecs(CodecBuilder::withCrc32c);
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid Codec: " + codec);
+ }
+
+ Array writeArray = Array.create(storeHandle, builder.build());
+ writeArray.write(ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{16, 16, 16}, testData));
+
+ //read in zarr-java
+ Array readArray = Array.open(storeHandle);
+ ucar.ma2.Array result = readArray.read();
+
+ Assertions.assertArrayEquals(new int[]{16, 16, 16}, result.getShape());
+ Assertions.assertEquals(DataType.UINT32, readArray.metadata.dataType);
+ Assertions.assertArrayEquals(new int[]{2, 4, 8}, readArray.metadata.chunkShape());
+ Assertions.assertEquals("test_value", readArray.metadata.attributes.get("test_key"));
+
+ Assertions.assertArrayEquals(testData, (int[]) result.get1DJavaArray(ucar.ma2.DataType.INT));
+
+ //read in zarrita
+ String command = pythonPath();
+
+ ProcessBuilder pb = new ProcessBuilder(command, PYTHON_TEST_PATH.resolve("zarrita_read.py").toString(), codec, codecParam, TESTOUTPUT.toString());
+ Process process = pb.start();
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
+ String line;
+ while ((line = reader.readLine()) != null) {
+ System.out.println(line);
+ }
+
+ BufferedReader readerErr = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+ while ((line = readerErr.readLine()) != null) {
+ System.err.println(line);
+ }
+
+ int exitCode = process.waitFor();
+ assert exitCode == 0;
+ }
+
+ @ParameterizedTest
+ @CsvSource({"0,true", "0,false", "5, true", "5, false"})
+ public void testZstdCodecReadWrite(int clevel, boolean checksum) throws ZarrException, IOException {
+ int[] testData = new int[16 * 16 * 16];
+ Arrays.setAll(testData, p -> p);
+
+ StoreHandle storeHandle = new FilesystemStore(TESTOUTPUT).resolve("testZstdCodecReadWrite", "checksum_" + checksum, "clevel_" + clevel);
+ ArrayMetadataBuilder builder = Array.metadataBuilder()
+ .withShape(16, 16, 16)
+ .withDataType(DataType.UINT32)
+ .withChunkShape(2, 4, 8)
+ .withFillValue(0)
+ .withCodecs(c -> c.withZstd(clevel, checksum));
+ Array writeArray = Array.create(storeHandle, builder.build());
+ writeArray.write(ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{16, 16, 16}, testData));
+
+ Array readArray = Array.open(storeHandle);
+ ucar.ma2.Array result = readArray.read();
+
+ Assertions.assertArrayEquals(testData, (int[]) result.get1DJavaArray(ucar.ma2.DataType.INT));
+ }
+
+ @Test
+ public void testTransposeCodec() throws ZarrException {
+ ucar.ma2.Array testData = ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{2, 3, 3}, new int[]{
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17});
+ ucar.ma2.Array testDataTransposed120 = ucar.ma2.Array.factory(ucar.ma2.DataType.UINT, new int[]{3, 3, 2}, new int[]{
+ 0, 9, 1, 10, 2, 11, 3, 12, 4, 13, 5, 14, 6, 15, 7, 16, 8, 17});
+
+ ArrayMetadata.CoreArrayMetadata metadata = new ArrayMetadata.CoreArrayMetadata(
+ new long[]{2, 3, 3},
+ new int[]{2, 3, 3},
+ DataType.UINT32,
+ null);
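+ // order {1, 2, 0} permutes the axes: output axis i takes input axis order[i], so the (2, 3, 3) input becomes a (3, 3, 2) output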
+ TransposeCodec transposeCodec = new TransposeCodec(new TransposeCodec.Configuration(new int[]{1, 2, 0}));
+ TransposeCodec transposeCodecWrongOrder1 = new TransposeCodec(new TransposeCodec.Configuration(new int[]{1, 2, 2}));
+ TransposeCodec transposeCodecWrongOrder2 = new TransposeCodec(new TransposeCodec.Configuration(new int[]{1, 2, 3}));
+ TransposeCodec transposeCodecWrongOrder3 = new TransposeCodec(new TransposeCodec.Configuration(new int[]{1, 2, 3, 0}));
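+ // the three wrong orders cover a duplicated axis, an out-of-range axis, and a rank mismatch, all of which must be rejected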
+ transposeCodec.setCoreArrayMetadata(metadata);
+ transposeCodecWrongOrder1.setCoreArrayMetadata(metadata);
+ transposeCodecWrongOrder2.setCoreArrayMetadata(metadata);
+ transposeCodecWrongOrder3.setCoreArrayMetadata(metadata);
+
+ assert MAMath.equals(testDataTransposed120, transposeCodec.encode(testData));
+ assert MAMath.equals(testData, transposeCodec.decode(testDataTransposed120));
+ assertThrows(ZarrException.class, () -> transposeCodecWrongOrder1.encode(testData));
+ assertThrows(ZarrException.class, () -> transposeCodecWrongOrder2.encode(testData));
+ assertThrows(ZarrException.class, () -> transposeCodecWrongOrder3.encode(testData));
+ }
+
+ @Test
+ public void testFileSystemStores() throws IOException, ZarrException {
+ FilesystemStore fsStore = new FilesystemStore(TESTDATA);
+ ObjectMapper objectMapper = Node.makeObjectMapper();
+
+ GroupMetadata group = objectMapper.readValue(
+ Files.readAllBytes(TESTDATA.resolve("l4_sample").resolve("zarr.json")),
+ GroupMetadata.class
+ );
+
+ System.out.println(group);
+ System.out.println(objectMapper.writeValueAsString(group));
+
+ ArrayMetadata arrayMetadata = objectMapper.readValue(Files.readAllBytes(TESTDATA.resolve(
+ "l4_sample").resolve("color").resolve("1").resolve("zarr.json")),
+ ArrayMetadata.class);
+
+ System.out.println(arrayMetadata);
+ System.out.println(objectMapper.writeValueAsString(arrayMetadata));
+
+ System.out.println(
+ Array.open(fsStore.resolve("l4_sample", "color", "1")));
+ System.out.println(
+ Arrays.toString(Group.open(fsStore.resolve("l4_sample")).list().toArray(Node[]::new)));
+ System.out.println(
+ Arrays.toString(((Group) Group.open(fsStore.resolve("l4_sample")).get("color")).list()
+ .toArray(Node[]::new)));
+ }
+
+ @Test
+ public void testS3Store() throws IOException, ZarrException {
+ S3Store s3Store = new S3Store(AmazonS3ClientBuilder.standard()
+ .withRegion("eu-west-1")
+ .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
+ .build(), "static.webknossos.org", "data");
+ System.out.println(Array.open(s3Store.resolve("zarr_v3", "l4_sample", "color", "1")));
+ }
+
+ @Test
+ public void testHttpStore() throws IOException, ZarrException {
+ HttpStore httpStore = new HttpStore("https://static.webknossos.org/data/");
+ System.out.println(
+ dev.zarr.zarrjava.v2.Array.open(httpStore.resolve("l4_sample", "color", "1")));
+ System.out.println(Array.open(httpStore.resolve("zarr_v3", "l4_sample", "color", "1")));
+ }
+
+ @Test
+ public void testV3ShardingReadCutout() throws IOException, ZarrException {
+ Array array = Array.open(new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "1"));
+
+ ucar.ma2.Array outArray = array.read(new long[]{0, 3073, 3073, 513}, new int[]{1, 64, 64, 64});
+ Assertions.assertEquals(outArray.getSize(), 64 * 64 * 64);
+ Assertions.assertEquals(outArray.getByte(0), -98);
+ }
+
+ @Test
+ public void testV3Access() throws IOException, ZarrException {
+ Array readArray = Array.open(new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "1"));
+
+ ucar.ma2.Array outArray = readArray.access().withOffset(0, 3073, 3073, 513)
+ .withShape(1, 64, 64, 64)
+ .read();
+ Assertions.assertEquals(outArray.getSize(), 64 * 64 * 64);
+ Assertions.assertEquals(outArray.getByte(0), -98);
+
+ Array writeArray = Array.create(
+ new FilesystemStore(TESTOUTPUT).resolve("l4_sample_2", "color", "1"),
+ readArray.metadata
+ );
+ writeArray.access().withOffset(0, 3073, 3073, 513).write(outArray);
+ }
+
+ @ParameterizedTest
+ @ValueSource(strings = {"start", "end"})
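+ // the sharding codec stores its chunk index either at the start or at the end of each shard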
+ public void testV3ShardingReadWrite(String indexLocation) throws IOException, ZarrException {
+ Array readArray = Array.open(
+ new FilesystemStore(TESTDATA).resolve("sharding_index_location", indexLocation));
+ ucar.ma2.Array readArrayContent = readArray.read();
+ Array writeArray = Array.create(
+ new FilesystemStore(TESTOUTPUT).resolve("sharding_index_location", indexLocation),
+ readArray.metadata
+ );
+ writeArray.write(readArrayContent);
+ ucar.ma2.Array outArray = writeArray.read();
+
+ assert MultiArrayUtils.allValuesEqual(readArrayContent, outArray);
+ }
+
+ @Test
+ public void testV3Codecs() throws IOException, ZarrException {
+ int[] readShape = new int[]{1, 1, 1024, 1024};
+ Array readArray = Array.open(
+ new FilesystemStore(TESTDATA).resolve("l4_sample", "color", "8-8-2"));
+ ucar.ma2.Array readArrayContent = readArray.read(new long[4], readShape);
+ {
+ Array gzipArray = Array.create(
+ new FilesystemStore(TESTOUTPUT).resolve("l4_sample_gzip", "color", "8-8-2"),
+ Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withGzip(5)).build()
+ );
+ gzipArray.write(readArrayContent);
+ ucar.ma2.Array outGzipArray = gzipArray.read(new long[4], readShape);
+ assert MultiArrayUtils.allValuesEqual(outGzipArray, readArrayContent);
+ }
+ {
+ Array bloscArray = Array.create(
+ new FilesystemStore(TESTOUTPUT).resolve("l4_sample_blosc", "color", "8-8-2"),
+ Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withBlosc("zstd", 5)).build()
+ );
+ bloscArray.write(readArrayContent);
+ ucar.ma2.Array outBloscArray = bloscArray.read(new long[4], readShape);
+ assert MultiArrayUtils.allValuesEqual(outBloscArray, readArrayContent);
+ }
+ {
+ Array zstdArray = Array.create(
+ new FilesystemStore(TESTOUTPUT).resolve("l4_sample_zstd", "color", "8-8-2"),
+ Array.metadataBuilder(readArray.metadata).withCodecs(c -> c.withZstd(10)).build()
+ );
+ zstdArray.write(readArrayContent);
+ ucar.ma2.Array outZstdArray = zstdArray.read(new long[4], readShape);
+ assert MultiArrayUtils.allValuesEqual(outZstdArray, readArrayContent);
+ }
+ }
+
+ @Test
+ public void testV3ArrayMetadataBuilder() throws ZarrException {
+ Array.metadataBuilder()
+ .withShape(1, 4096, 4096, 1536)
+ .withDataType(DataType.UINT32)
+ .withChunkShape(1, 1024, 1024, 1024)
+ .withFillValue(0)
+ .withCodecs(
+ c -> c.withSharding(new int[]{1, 32, 32, 32}, CodecBuilder::withBlosc))
+ .build();
+ }
+
+ @Test
+ public void testV3FillValue() throws ZarrException {
+ Assertions.assertEquals((int) ArrayMetadata.parseFillValue(0, DataType.UINT32), 0);
+ Assertions.assertEquals((int) ArrayMetadata.parseFillValue("0x00010203", DataType.UINT32), 50462976);
+ Assertions.assertEquals((byte) ArrayMetadata.parseFillValue("0b00000010", DataType.UINT8), 2);
+ assert Double.isNaN((double) ArrayMetadata.parseFillValue("NaN", DataType.FLOAT64));
+ assert Double.isInfinite((double) ArrayMetadata.parseFillValue("-Infinity", DataType.FLOAT64));
+ }
+
+ @Test
+ public void testV3Group() throws IOException, ZarrException {
+ FilesystemStore fsStore = new FilesystemStore(TESTOUTPUT);
+
+ Group group = Group.create(fsStore.resolve("testgroup"));
+ Group group2 = group.createGroup("test2", Map.of("hello", "world"));
+ Array array = group2.createArray("array", b ->
+ b.withShape(10, 10)
+ .withDataType(DataType.UINT8)
+ .withChunkShape(5, 5)
+ );
+ array.write(new long[]{2, 2}, ucar.ma2.Array.factory(ucar.ma2.DataType.UBYTE, new int[]{8, 8}));
+
+ Assertions.assertArrayEquals(((Array) ((Group) group.listAsArray()[0]).listAsArray()[0]).metadata.chunkShape(), new int[]{5, 5});
+ }
+
+ @Test
+ public void testV2() throws IOException {
+ FilesystemStore fsStore = new FilesystemStore("");
+ HttpStore httpStore = new HttpStore("https://static.webknossos.org/data");
+
+ System.out.println(dev.zarr.zarrjava.v2.Array.open(httpStore.resolve("l4_sample", "color", "1")));
+ }
+
+
}
diff --git a/src/test/python-scripts/zarrita_read.py b/src/test/python-scripts/zarrita_read.py
new file mode 100644
index 0000000..f84bf9b
--- /dev/null
+++ b/src/test/python-scripts/zarrita_read.py
@@ -0,0 +1,53 @@
+import sys
+
+import numpy as np
+import zarrita
+from zarrita.metadata import ShardingCodecIndexLocation
+
+codec_string = sys.argv[1]
+param_string = sys.argv[2]
+codec = []
+if codec_string == "blosc":
+ cname, shuffle, clevel = param_string.split("_")
+ codec = [zarrita.codecs.bytes_codec(),
+ zarrita.codecs.blosc_codec(typesize=4, cname=cname, shuffle=shuffle, clevel=int(clevel))]
+elif codec_string == "gzip":
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.gzip_codec(level=int(param_string))]
+elif codec_string == "zstd":
+ level, checksum = param_string.split("_")
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.zstd_codec(checksum=checksum == 'true', level=int(level))]
+elif codec_string == "bytes":
+ codec = [zarrita.codecs.bytes_codec(endian=param_string.lower())]
+elif codec_string == "transpose":
+ codec = [zarrita.codecs.transpose_codec((1, 0, 2)), zarrita.codecs.bytes_codec()]
+elif codec_string == "sharding":
+ codec = zarrita.codecs.sharding_codec(chunk_shape=(2, 2, 4), codecs=[zarrita.codecs.bytes_codec("little")],
+ index_location=ShardingCodecIndexLocation.start if param_string == "start"
+ else ShardingCodecIndexLocation.end),
+elif codec_string == "sharding_nested":
+ codec = zarrita.codecs.sharding_codec(chunk_shape=(2, 2, 4),
+ codecs=[zarrita.codecs.sharding_codec(chunk_shape=(2, 1, 2), codecs=[
+ zarrita.codecs.bytes_codec("little")])]),
+elif codec_string == "crc32c":
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.crc32c_codec()]
+else:
+ raise ValueError(f"Invalid {codec_string=}")
+
+store = zarrita.LocalStore(sys.argv[3])
+expected_data = np.arange(16 * 16 * 16, dtype='int32').reshape(16, 16, 16)
+
+a = zarrita.Array.open(store / 'write_to_zarrita' / codec_string / param_string)
+read_data = a[:, :]
+assert np.array_equal(read_data, expected_data), f"got:\n {read_data} \nbut expected:\n {expected_data}"
+
+b = zarrita.Array.create(
+ store / 'read_from_zarrita_expected' / codec_string / param_string,
+ shape=(16, 16, 16),
+ chunk_shape=(2, 4, 8),
+ dtype="uint32",
+ fill_value=0,
+ attributes={'test_key': 'test_value'},
+ codecs=codec
+)
+
+assert a.metadata == b.metadata, f"not equal: \n{a.metadata=}\n{b.metadata=}"
diff --git a/src/test/python-scripts/zarrita_write.py b/src/test/python-scripts/zarrita_write.py
new file mode 100644
index 0000000..2eb0fc2
--- /dev/null
+++ b/src/test/python-scripts/zarrita_write.py
@@ -0,0 +1,47 @@
+import sys
+
+import zarrita
+import numpy as np
+from zarrita.metadata import ShardingCodecIndexLocation
+
+codec_string = sys.argv[1]
+param_string = sys.argv[2]
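+# param_string packs the codec parameters with underscores, e.g. "lz4_shuffle_6" -> cname, shuffle, clevel for blosc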
+codec = []
+if codec_string == "blosc":
+ cname, shuffle, clevel = param_string.split("_")
+ codec = [zarrita.codecs.bytes_codec(),
+ zarrita.codecs.blosc_codec(typesize=4, cname=cname, shuffle=shuffle, clevel=int(clevel))]
+elif codec_string == "gzip":
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.gzip_codec(level=int(param_string))]
+elif codec_string == "zstd":
+ level, checksum = param_string.split("_")
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.zstd_codec(checksum=checksum == 'true', level=int(level))]
+elif codec_string == "bytes":
+ codec = [zarrita.codecs.bytes_codec(endian=param_string.lower())]
+elif codec_string == "transpose":
+ codec = [zarrita.codecs.transpose_codec((0, 1)), zarrita.codecs.bytes_codec()]
+elif codec_string == "sharding":
+ codec = zarrita.codecs.sharding_codec(chunk_shape=(2, 4), codecs=[zarrita.codecs.bytes_codec("little")],
+ index_location=ShardingCodecIndexLocation.start if param_string == "start"
+ else ShardingCodecIndexLocation.end),
+elif codec_string == "sharding_nested":
+ codec = zarrita.codecs.sharding_codec(chunk_shape=(2, 4),
+ codecs=[zarrita.codecs.sharding_codec(chunk_shape=(1, 2), codecs=[
+ zarrita.codecs.bytes_codec("little")])]),
+elif codec_string == "crc32c":
+ codec = [zarrita.codecs.bytes_codec(), zarrita.codecs.crc32c_codec()]
+else:
+ raise ValueError(f"Invalid {codec_string=}")
+
+store = zarrita.LocalStore(sys.argv[3])
+testdata = np.arange(16 * 16, dtype='int32').reshape((16, 16))
+
+a = zarrita.Array.create(
+ store / 'read_from_zarrita' / codec_string / param_string,
+ shape=(16, 16),
+ dtype='int32',
+ chunk_shape=(2, 8),
+ codecs=codec,
+ attributes={'answer': 42}
+)
+a[:, :] = testdata
diff --git a/src/test/python-scripts/zstd_decompress.py b/src/test/python-scripts/zstd_decompress.py
new file mode 100644
index 0000000..0235fdd
--- /dev/null
+++ b/src/test/python-scripts/zstd_decompress.py
@@ -0,0 +1,13 @@
+import sys
+
+import zstandard as zstd
+
+data_path = sys.argv[1]
+expected = sys.argv[2]
+
+with open(data_path, "rb") as f:
+ compressed = f.read()
+
+decompressed = zstd.ZstdDecompressor().decompress(compressed)
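+# the Java test writes the integer with ByteBuffer.putInt, which is big-endian by default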
+number = int.from_bytes(decompressed, byteorder='big')
+assert number == int(expected)
diff --git a/testdata/sharding_index_location/end/c/0/0/0 b/testdata/sharding_index_location/end/c/0/0/0
new file mode 100644
index 0000000000000000000000000000000000000000..5704d5a58bc1b66ae76eb4e687acdd762da71156
GIT binary patch
literal 4228
zcmZA5w~_+O0R_;-%WLr|o)k}tC&iQENm=Zt={r=V57OB{RjSv;rKkJcNeFtV2?h$`
zukhE`Z@z!$-w}|YgyeTGB2kIyn7;Wp|K`86SAYk2fCqSh2Y8&vd7Q_2oX2^b$9bOTd7kHap67X<=Xssid7amJo!5Dt
z*SYPlAP>3-@*ofLAP@2cPw)g!@B~lr1W)h+FYp2{aKG+XffsmzH+X|Lc!M{1gEx4C
z+xiOe5D!^|c!-C1h$nfHCwY=5d6Fl2k{5ZA7kQBvd65@+kvDmhH+hpcd6PGJlUsg;
zd6hG%$&XLyEZc!ple_>9l^jL-Ot&-je*_>S-Rj_>%6@A!`2_>JHAjo&_kstYyKlp<`_=7+AgFpC#kNAj>_=u1Ah>!S)ulS0u_=>Oiim&*J
zpZJNN_=%tRiJ$n1KlzhC`IA5SlRx>BkNKF7`IwLSn2-6Gulbs<`I@i!ny>kqpZS@e
z`I(>jnV}EPs)|%HQPg@(=l^{{Z}(fAeqt&A<6K|K>TK<2jz=
zIiBM=p5rxM<27F6HD2R2UgH5C-~k@s0UqE19^i2v=W!nAaUSP!9_M+U=Xsvzd7kHa
zp67L5=XGA^bzbLnUgtp`Ph
z;0@m34c_1l-ryk~;vpX5As*r(9^y%!
zP2S{9-sE8(=3yS@VIJmT9_A^Y;whfuDW2jfp5i55;w4_yWnSiGUgmAy=55~QZQkZ>-sVvr
ze82~M
zz!!YM7kt4Ne8Crd!4Lev5B$In{J;i{Ka4V#V35iCw#&ue8MMu!Z&=wH+;i4e8V?Y7m|P
literal 0
HcmV?d00001
diff --git a/testdata/sharding_index_location/end/c/0/0/1 b/testdata/sharding_index_location/end/c/0/0/1
new file mode 100644
index 0000000000000000000000000000000000000000..c93d72d9c2082507a98cf09caa38323622bc08ce
GIT binary patch
literal 4228
zcmZA5xsGb-6@by(?Y3Ra4w^Ei+5IFIu@&+|Ob
z^E}VeZ2C;#N1{F8t301xm05AXmF@Bk0+1W)h;Pw)g!@B~lr0x$3aFYp2{
z@B%OJI9^EPku
zD39_ekMbyw@+gn;EYI>R&+;tK@+{BtDzEY?uktFd@+z|@9+-q@DA_r7?1H7
zkMS6f@feTs9MAC_&+#13@f^?b8n5vhukjkM@fxr3F7NU#@A5A1@-FZ4U;nTVditKe
zr|;=|`kub0@9AfJ#%FxSXMDzIe8zWt$9H_kcYMcpe8;c+%CG#&ul&ld{L1^h&-=X3
z`@GNlywB%+&gXp2=X}oRe9rfL&-Z-K_k7Rye9v$E#&7(_Z~Vq@{Kf}-zz2N52YkQ>
ze83lc!54hN7kt4Ne8CU=zz_Vu5B$In{J`)0&hPxr@BGg1{LY7b$cKE$hkVF~e8`u4
z$(MY|mwd^We94de$dCNUkNn7w{Ky~t!5{p=AN;`|{J}?j#7BI@M|{Lbe8g9L#aDdA
zSA4}+e8o@v#83RhPyEDB{KTL9$)EhmpZv+6{K?0B%*TAp$9&Ake9YH;&DVU**L=;_
ze9gc37ysg4{EL6_FaE{9`8WUO-~5|@^KbsmCw#&ue8MMu!Y6#fH+;i4e8V?i{Ka4V#b5lzr+mt%e9EVM%BOtFw|vXDe9O0d%eQ>XFZ{wU
z{K7B%!Y};7-~7$r{LSC|&ENdZ|F{o6{6P3{NPjpeKO9yc4vY^=`@`b=u)N|t&f`4J
z<2=seJkIkx&+|Ob^E}V%7i`JjjDQ$b&q{gFMKSJjs(h$&)yu?eq
z#7n%yOT5IJyvduq$(y{%o4m=xJj}y9%)>m)!#vE>Jk8TQ&C@*1(>%?~yv)nI%*(vY
z%e>56yv19*#aq0^TfD_1Ji;S9!XrGwBRs-0Ji{|Q!!ta?Gd#m9yuvHI!YjPOE4;$n
zyv^IZ&D*@q+q})AJj$ay%A-8Wqddy9Jj=5@%d3jN~zNhc$XMDzIe8y*d#%FxScYMcpe8+cu$9H_kul&ld{K~KV
z%CG#&`@GNlywCf*&-=X3=X}oRe9q^5&gXp2_k7Rye9!lM&-Z-KZ~Vq@{Kjwm#&7(_
z2YkQ>e82~Mzz2N57kt4Ne8Crd!54hN5B$In{J;>+R!Y6#fCw#&ue8M+;
z!#8}xH+;i4e8bQD%+LJH&-~2K{LEkc#b5lzU;M>i{Kcny%BOtFr+mt%e9E_c%eQ>X
zw|vXDe9JHV!Y};7FZ{wU{KDV-&ENdZ-~7$r{LTM={rvCX=k>?WzkC1p&;RND-+%w_
JfByCF{{bByFrEMa
literal 0
HcmV?d00001
diff --git a/testdata/sharding_index_location/end/c/0/1/0 b/testdata/sharding_index_location/end/c/0/1/0
new file mode 100644
index 0000000000000000000000000000000000000000..699a7712d22d230d1d4e29207245340547d9c5be
GIT binary patch
literal 4228
zcmZA3H?k_}0fyl_qmg&B1eI2x)o49~PY*uDlj2G7qX|@QQ@5+WqUq=D
zwCn%zhkyU{?Wb?we%;UZi`!p*upjMr_Up6X+aK(Y-@bkD5B|YF_y_;sAN+&oc#h|I
zj^}ue=Xj3Sc#YS1jn{aM*LaNwcz_3ZfCqSh2Y7(Td7Q_2oX2^b$9bIRd7kHap67X<
z=Xsvjd7amJo!5Dt*Lj@>d5{NrkOz5?2YHYuc!DQ*f+u)_CwPJvc!3vqffsmz7kGg;
zc!M{1gEx4CH+X}Gc!-C1h=+KHhj@r5d6Fl2k|%kRCwY<=d65@+kr#Q97kQC4d6PGJ
zlQ(&jH+hqXd6e8y*d#%FxSXMD$Ze8+cu$9H_kcl^e0{Kjwm#&7(_Z@ka@ywCf*
z&-=X3`+Uyle9q^5&gXp2=X}rie9!lM&-Z-K_x#T9{Lb(E&hPxr?|i@qe82~Mzz2N5
z2YkU7e8Crd!54hN7yQ5v{J;i{Ka2<%BOtFr+mt%e9EVM%eQ>Xw|vXDe9O1|%CG#&
zul&ld{K~KV&ENdZ-~7$r{LSC|`|rUoe{v4^v;D>XYJao8+du4|&H(&_fAA0f!9Vy1
z|KK^E<2jz=IiBM=p5rxM<27F6HD2R2UgH5C-~k@s0UqE19^i2v=W!nAaUSP!9_M+U
z=Xsvzd7kHap67L5=XGA^bzbLnUgtp`Ph;0@m34c_1l-ryk~;vpX5As*r(9^y%!P2S{9-sE8(=3yS@VIJmT9_A^Y;whfuDW2jfp5i55;w4_yWnSiGUgmAy=55~Q
zZQkZ>-sVvri{Ka4V#ixAAr+mt%e9EVM%C~&Ww|vXDe9O0d
z%dh;(ul&ld{K~KV%HRCW-~7$r{LSC|&HsP>?A-9{_5JU^y#L$h|MC8B|F7o%?fwr)
CeGt3=
literal 0
HcmV?d00001
diff --git a/testdata/sharding_index_location/end/c/0/1/1 b/testdata/sharding_index_location/end/c/0/1/1
new file mode 100644
index 0000000000000000000000000000000000000000..19a595ae8a1949a3005d6ef6f1ecc84f204c0a4f
GIT binary patch
literal 4228
zcmZA5x2|&O0fy0k#^W3>!SWUOYJ5#YZV&OaWJ;51$;grmGA$fZOU7#oOQYG+D`2l@
zx1YB7{=0wt@a>0h-+tQP>_=aJ{La3&U)fLher><8-+ue{!9Vy1|KK0|gMaW3p5l=3yS@VIJmTp5iH<;whfuDW2jfUgSky
zP2S{99^nxl;SnC;5gy?Yp5|$u=4qbhX`be3Ug9NQ;w4_yWnSiG-sWxI=55~QZQkZ>9^)|{
z<1rrNF&^VF{>eZ2C;#N1{F8t3PhQ~_Uf~s9;T2xt72e?;-r*hI;T_)L9sd0<&V#PL
ztMBT&`mVmK@9MkyDWCEwpYkc6@+qJ4E#LAj-|{Wr@-5%;3%~FSzwisc@C(229`ErU
z@9`e*@gDE-8K3bPpYa)=@fn}-9pCXC-|-#a@g3jsE5Gt9zw#@;@+-gcKJW8B@AE$I
z^FHtMIiK@6pYu7N^EsdMFaE{9_!s};U;K-I@f*MK8^7@zzwsNt@c|$30Uz)IAMgPm
z@C9G+1z+$5U+@KA@IBx2J>T;^-}61+^EJffA9x?@DU&J5g+jpAMp_%@fBb36<_fc
zU-1=R@gqO-BR}#ZKk_3#@+W`tCx7xMfAS}P@-ZLtF(30WAM-IE^EF@dHDB{JU-LCz
z^AkVu6F>12Kk*Yk@fUyb7k}{=fAJT8@d=;s37_x@pYREv@D1Pa4d3t$-|!9J@H0R2
zGe7e)Kl3v`^EZFh
zfA9~U1Z;0@m34c_1l-rx-$=3yS@VIJmT9_C@5;whfuDW2jfp5iH9P2S{9-sDXl;SnC;5gy?Y9^nz5=4qbhX`be3p5|#@;w4_yWnSiGUgl-q=55~QZQkZ>
z-sWu{<1rrNF&^VF9^*0o$v^og|Ky+ilYjD0Uf~s9;T2xt6<*;L-r*hI;T_)L9p2#`
z{>$^AtMBT&`mVmK@9Mkyu71j=e9EVM%BOtFr+mw|e9O0d%eQ>XxBS8{{K7B%!Y};7
zFTBTlyvKXI$9ufTdwj-ce8y*d#%FxSXMD$Ze8+cu$9H_kcl^q){K~KV%CG#&ue{Iu
zywCf*&-=X3`+Uyle9q^5&gXp2=lqL*@h|?xzxWsb;$Qs6Z~Vq@{Kjwm#&3MU2YkQ>
ze82~Mzz2N67kt4Ne8Crd!54hb_k7Rye9!lM&-eV!@BGg1{Lb(E&hLE4hkVF~e8`7<
z$cKE%mwd^We94!5$(Q`V5B$In{J;i{Ka4V#b12FCw#&ue8MMu!Y6#gH+;i4e8V?<
z!#DiQ&-~2K{LIh%%+LJI-~7$r{LSC|&ENe0*H8Z){B-^D^Y5Pj`Tc)-{^!5{`=5XP
F`+pcQFuVW&
literal 0
HcmV?d00001
diff --git a/testdata/sharding_index_location/end/zarr.json b/testdata/sharding_index_location/end/zarr.json
new file mode 100644
index 0000000..72e3a3f
--- /dev/null
+++ b/testdata/sharding_index_location/end/zarr.json
@@ -0,0 +1,76 @@
+{
+ "shape": [
+ 16,
+ 16,
+ 16
+ ],
+ "data_type": "int32",
+ "chunk_grid": {
+ "configuration": {
+ "chunk_shape": [
+ 16,
+ 8,
+ 8
+ ]
+ },
+ "name": "regular"
+ },
+ "chunk_key_encoding": {
+ "configuration": {
+ "separator": "/"
+ },
+ "name": "default"
+ },
+ "fill_value": 0,
+ "codecs": [
+ {
+ "configuration": {
+ "chunk_shape": [
+ 8,
+ 4,
+ 8
+ ],
+ "codecs": [
+ {
+ "configuration": {
+ "order": [2, 1, 0]
+ },
+ "name": "transpose"
+ },
+ {
+ "configuration": {
+ "endian": "little"
+ },
+ "name": "bytes"
+ },
+ {
+ "configuration": {
+ "typesize": 4,
+ "cname": "lz4",
+ "clevel": 5,
+ "shuffle": "noshuffle",
+ "blocksize": 0
+ },
+ "name": "blosc"
+ }
+ ],
+ "index_codecs": [
+ {
+ "configuration": {
+ "endian": "little"
+ },
+ "name": "bytes"
+ },
+ {
+ "name": "crc32c"
+ }
+ ],
+ "index_location": "end"
+ },
+ "name": "sharding_indexed"
+ }
+ ],
+ "attributes": {},
+ "zarr_format": 3,
+ "node_type": "array"
+}
\ No newline at end of file
diff --git a/testdata/sharding_index_location/start/c/0/0/0 b/testdata/sharding_index_location/start/c/0/0/0
new file mode 100644
index 0000000000000000000000000000000000000000..e828007441ccfdc8321284a99b6a24218b7b37ea
GIT binary patch
literal 4228
zcmZA4x3(%vmIcs*Ze0iMqH)oMJ7`bz2#9ZN|5k!uD{`&of
z|LUv%@BbX1{_S7=^KY!M#f3Eup7zb78ulb*d9#t_J?U(=ZU;by!
zFaPDgJjZi9$8$W#b3Dg$yvA$1#%sLBYrMv59Q_2`0zAM2Jir4yz~em5<2=seJkH}h
z&htFa^E}V%7kEyw2;q&Q1RWdC)_U2YHYOd5|Y~f+u)_CwPJ1Z;0@m34c_1l-rx;x>LP2S{9Zuk@CVIJmT6JZ|aVV>eCp5iH<;whfuDPH0wUg9NQ
z;w4_%@7Jk8TQ&C|Th%e>6Xyv)nI
z%*(vZ+q})&yv^IZ&D&i2iSj6q@+gn;D2Vb5&+rV-@C?uJ4A1ZiukZ@5@CvW+3a{`E
z@9+-q@DA_r4)1W+PmITSjK_G4$9N28d6s8+mS=gEXL*)ad6idrl~;L{S9z6pd6##2
zmv?!WcX^lpeLv{Q_vCx>J^7w|PrfJLlb`V!pYa)=@fn}-8Q<|8-|-#a@g3js9l!A#
zzwsNt@f*MK8}IWz@AE$I^FHtMKA-bBpYu7N^EsdMIp6a=-}61+^F815J-_oizwK5nu5YU-1?H^B=V0E570=4XE9XMW~y{^oD~=5PMyZ~o>JKH(ES;S)aL
z6F%V^zTq3b;Tyi;8@}Nee&H8>;TL}47k=R%{^1|~;UE6tAO7J}KIKzB%7i`JjjDQ$b&q{gFMI+
zJi!w@!4o{e6Fk8Syub^*zze*<3%tM^yulm1!5h568@$0oJj6ph#6vv9Lp;QjJjs(h
z$&)m)!#vDWJjGKy#Zx@R
zQ#{2>yu?eq#7n%yOT5Heyv19*#aq0^TfD_1Ji;S9!XrGwBRs;>Jk8TQ&C@*1(>%?~
zyv)nI%*(vY%e>6nyv^IZ&D*@q+q})AJj$ay%A-8WqddwpJi{|Q!!ta?Gd#m9yuvHI
z!YjPOE4;!xyu&-Z!#ljgJG{eVJjP=@#$!CjV?4&QJj=5@%d9l^jPLl4@A!`I
z_>S-Rj^Fr=-}sH+_>JHAjrVz<_j#Z9d7t-rpU?T6&-t9s`JB)BobUOb@A;nZ`JV6j
zp5OVM-}#;2`JLbSoe%hc5BPu&_<#@ifG_xhFZhBl_<}F^f*<&SANYYE_<&_kw5v9KlzhC
z`IA5SlaKg_kNAj>_=u1Ah_CpHulS0u_=>OiivRgWJ@FGi@e@Ds6F>16fAJT8@ju_N
zFaF{$KIUUS=3_qQV?O3%zUFJb=4-y@Yrf`de&%O>=4XE9XMW~q{^oD~=5PMyZ~o?Q
zKH(ES;S)aL6F%V+zTq3b;Tyi;8@}Nie&H8>;TL}47k=Rv{^1|~;UE6tAO7JVKIKzB
x#xPT8!AxVZng6ms!Rw1!oI|qm=Haw4zcIfx
zzcW9+`@Q*t`QuMN#d(~^d7Q_2oX2^b=Xsvzd7kHap67X<*Lj`Sd7amJo!5Dt2Y7%7
zcz_3ZfCqShCwPJ8ueulGS$-_>{ZU42*I)pzw>{ghAnlu!ATPx+Kj
z`Ic|_mT&o%Z~2yQ`8WUO-~5|@^Kbsmzj=@Mc#rpZkN0?w_xOy@_>9l^jL-Ot&-jk-
z_>S-Rj_>%6@A#Ep`ITS!m0$UlUwNPRd7t-rpZ9s6_xYU9`JB)BoX`23&-tG3`JV6j
zp6~gd@A-}2_>JHAjo&_kstYyANhkn
z_=7+AgFpC#Klq4`_=u1Ah>!S)kNAqO_=>Oiim&*JulR|d_=%tRiJ$n1pZJqM`IA5S
zlRx>BKlzxC`IwLSn2-6GkNKLf`I@i!ny>kqulbpu`I(>jnV(+7v+b`>cfTcVQGI@oFA4~oX2^b$9bH`d7Q_2p67X<=Xsvzd7kHao!5Dt
z*Lj`Sd7amJfCqSh2Y7%7cz_3Zf+u)_CwPJF*o40wJw|Seld6Y+alt+1#M|qS-
zd6s8+mS=gEXL*)qd6idrl~;L{S9z6Jd53p+hj(~~cX)?)c#Ow*jK_G4$9Rm#c#h|I
zj^}ue=Xj3ic#YS1jn{aM*LaQB_!s};U;K-I@h|?xzxc2BL08|^clBL;SKrll^{I6kN0?w_jr%@c#rq^jL-Ot
z&-jeb_>9l^j_>%6@A!`I_>S-Rm0$UlU-^|^`ITRJpZ9s6_j#Z9d7tJHAfDib95BPu&_<#@if-m@jFZhBl_<}F^
zfgkvRANYYE_<&_gFpC#Klp<`_=7+Ah>!S)kNAj>_=u1Aim&*JulS0u_=>OiiJ$n1pZJNN
z_=%tRlRx>BKlzhC`IA5Sn2-6GkNKF7`IwLSny>kqulbs<`I@i!nVj
zi@*4bzxa#4_=~^zgirW{PxypS_=HdRhHvaFhP
zJe_uZfBo)<-=BZ}!}ag~z5eg{U%&o;|M&0z|M{PPyZqy)Kl#hgzx)0@`~JOeZomD+
zeri9n@BjOS{nCE*-FF}SgMaW3{=q-^2mjzXp5r;5<2jz=IiBM+UgI@h<27F6HD2QZ
z9^e5U-~k@s0UqFS9_Mi$=W!nAaUSP+p67X<=Xsvzd7kHWUgvdQ=XGA^bzbK|9^^qD
zP2S{T9_C>l=3yS@VIJlw
zp5iH<;whfuDW2jbUg9NQ;w4_yWnSiO-sWxI=55~QZQkZl9_3LU9l^jL-Ot
z@A!`I_>S-Rj_>%6U-*Sz_=R8igJHAjSu;d5BZP}`H&C!kT3a?FZq%$`I0aBk{|eiANYYE_<_=u1Ah_CpHulS0u_=>OiiXZurANi3V`H>&_kw5r@
zKlp<`_=7+AgOB-`kNKF7`IwLSn6LSoulbs<`I@i!nxFWIpZJNN_=%tRi9h+1KlzhC
z`IA5SlTY}BPxypS_=HdRgm3tUZ}^69_=a!zhM)PFpZS@e`I(>jnZNjpzxa#4_=~^z
zi%8ue7ysg4{ENT&o4@&+zxkWL`J4av
zd+^Pf;@fZRxAr@8rug;;`=kBI8GwKA5B|YF_y_;sA3Vo%JjZi9$8$W#bG*iDyvA$1
z#%sLBYdpXMJir4yzymzM13b>-JkH}h&f`4J<2=vvJkRqy&+|Ob^SsXMyw2;q&g;C+
z>paMVJjjDQ$b&q{gFL|#Ji!w@!4o{e6TH9+yub^*zze*<3%tP_yulm1!5h568$85A
zJj6ph#6vv9Lp;foJjs(h$&)@_JJj}y9
z%)>m)!#u@PJjGKy#Zx@RQ@q4Wyu?eq#7n%yOT5Kfyv19*#aq0^TRg%eJi;S9!XrGw
zBRtL1Jk8TQ&C@*1)4a^fyv)nI%*(vY%e>9oyv^IZ&D*@q+dRsnJj$ay%A-8Wqddbi
zJi{|Q!!ta?GrYnpyuvHI!YjPOE4;%yyu&-Z!#ljgJ3Pi?JjP=@#$!CjV?4{VJj=5@
z%d3jMapYa)=
z@fn}-8K3bT-|-#a@g3js9pCW_zwisc@C(223%~F_@AE$I^FHtMKJW86pYu7N^EsdM
zIiK@S{>eZ2C;#N1{F8t3E5Gt9zw#@;@+-gc0Uz)IAMgPm@Btt21z+$5U+@KA@C9G+
zJ>T;^-}61+^F8158^7@zzwsNt@f*MKAs_M~AMzm|@*yAcC13I-U-Bhi@+DvL13&Ns
zKkx%T@B=^aJHPWgzwK6<_fcU-1=R@fBb3BR}#ZKk_3#
z@*_X;2Y>JffA9x?@CSeJF(30WAM-IE^D!UuHDB{JU-LCz^EF@d6F>12Kk*Yk@e@Ds
zCx7xMfAS}P@+W`t37_x@pYREv@Cl#r4d3t$-|!9J@D1PaGe7e)Kl3v`^D{s57k}{=
zfAJT8@fUybDWCEwpYkc6@+qJ4E#LAj-|{Wr@-5%;FaE{9_!s};U;K-I@i%|-H-Gat
KfAcqg^Zx)S0}&Pg
literal 0
HcmV?d00001
diff --git a/testdata/sharding_index_location/start/c/0/1/1 b/testdata/sharding_index_location/start/c/0/1/1
new file mode 100644
index 0000000000000000000000000000000000000000..cf8c29248a4e814b3b88f392ac7e43d90094a4cf
GIT binary patch
literal 4228
zcmZA3H?jih0fo^SY2@u1v=;3^Ds4bof)+9O^ufcVcv3tmo)k}tMeZ2C;#N1{F8t3PhQ{!Uf=~@;00db1>WEd-rx=1;0@m34IbuU9_C>l=3yS@
zVV>kkp5#fMO&;M99^nxl;SnC;5uV~H
zp5iH<;whfuDPH0wUg9NQ;w4_yW!~m(-sWxI=55~QZ64z>9^)|{<1rrNF`nTWp5Ymu;TfLc
z8D8NPUf~s9;T2xt72e?;-r*hI;T_)L9sc9ul&ld{K~KV%CEe~d%VYcyvKXI$9sInXMDzIe8y*d#%FxT
zcYMcpe8+cu$9Me3Z~Vq@{Kjwm#&5jO`@GNlywCf*&-;AN=X}oRe9q^5&gXp3_k7Ry
ze9!lM&-eV!@BGg1{Lb(E&hLD{2YkQ>e82~Mzz2N67kt4Ne8Crd!593%5B$In{J;i
z{Ka2-c!3vqffsmz7kGg;c!M{1gEx4CH+X}Gd6T;^zwJfAMzm|@*yAcAs_M~U-Bhi@+DvLC13I-Kk_3#@*_X;
zBR}#ZfAS}P@+W`tCx7xMAMp_%@ev>K5g+jpU-1=R@fBb36<_fcKk*Yk@e@Ds6F>12
z|KeZ#i+}Mi{>8ue7a#L6AM-IE^D!UuF<
Date: Mon, 24 Jun 2024 12:13:09 +0200
Subject: [PATCH 02/15] add publish info to pom.xml
---
pom.xml | 48 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 48 insertions(+)
diff --git a/pom.xml b/pom.xml
index e3f4e4b..8139029 100644
--- a/pom.xml
+++ b/pom.xml
@@ -8,6 +8,28 @@
zarr-java
0.0.1-SNAPSHOT
+  <name>Zarr-Java</name>
+  <description>
+    Zarr-Java is a Java library providing an implementation of chunked,
+    compressed, N-dimensional arrays implementing the ZarrV3 format.
+  </description>
+  <url>https://github.com/zarr-developers/zarr-java</url>
+
+  <licenses>
+    <license>
+      <name>MIT License</name>
+      <url>https://opensource.org/license/mit/</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+
+  <scm>
+    <url>https://github.com/zarr-developers/zarr-java</url>
+    <connection>scm:git:git://github.com/zarr-developers/zarr-java.git</connection>
+    <developerConnection>scm:git:git@github.com:zarr-developers/zarr-java.git</developerConnection>
+  </scm>
+
+
8
8
@@ -85,6 +107,14 @@
+
+  <distributionManagement>
+    <repository>
+      <id>ossrh</id>
+      <url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
+    </repository>
+  </distributionManagement>
+
unidata-all
@@ -103,6 +133,24 @@
false
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.13.0
+
+
+ ${maven.compiler.target}
+
+
+ org.sonatype.plugins
+ nexus-staging-maven-plugin
+ 1.6.6
+ true
+
+ nexus
+ http://localhost:8081/nexus/
+
+
From 2c33ca2c2bb5110a7f47d1be72524cfffe2c6ba8 Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Mon, 24 Jun 2024 12:34:03 +0200
Subject: [PATCH 03/15] use build info from jzarr
---
pom.xml | 81 ++++++++++++++++++++++++++++++++++++++++++++++++++-------
1 file changed, 72 insertions(+), 9 deletions(-)
diff --git a/pom.xml b/pom.xml
index 8139029..d5669c5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,7 +29,6 @@
scm:git:git@github.com:zarr-developers/zarr-java.git
-
8
8
@@ -107,13 +106,13 @@
-
ossrh
https://oss.sonatype.org/service/local/staging/deploy/maven2/
+
@@ -140,17 +139,81 @@
${maven.compiler.target}
+ UTF-8
+
+ .gitignore
+ temp/**
+
-
- org.sonatype.plugins
- nexus-staging-maven-plugin
- 1.6.6
- true
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.1.0
+
+
+ verify
+
+ jar
+
+
+
+ temp.**
+
+
+
+
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ 3.5.0
+
+
+ attach-javadoc
+
+ jar
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-resources-plugin
+ 3.1.0
- nexus
- http://localhost:8081/nexus/
+ UTF-8
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 1.10
+
+
+ org.apache.maven.plugins
+ maven-gpg-plugin
+ 1.6
+
+
+ sign-artifacts
+ verify
+
+ sign
+
+
+
+ TODO
+
+
+
+
From 0c9f4d654e43ef959d9e265509950307efd85069 Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Mon, 24 Jun 2024 15:18:41 +0200
Subject: [PATCH 04/15] create deploy.yml based on jzarr
---
.github/workflows/deploy.yml | 38 ++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
create mode 100644 .github/workflows/deploy.yml
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..efc02ba
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,38 @@
+name: Publish package to the Maven Central Repository
+on:
+ release:
+ types: [created]
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+
+ - uses: actions/checkout@v3
+
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '22'
+ distribution: 'temurin'
+ server-id: ossrh
+ server-username: MAVEN_USERNAME
+ server-password: MAVEN_PASSWORD
+
+ - name: Download blosc jar
+ run: |
+ mkdir -p ../blosc-java/target
+ curl https://static.webknossos.org/misc/blosc-java-0.1-1.21.4-SNAPSHOT.jar -o ../blosc-java/target/blosc-java-0.1-1.21.4-SNAPSHOT.jar
+
+ - id: install-secret-key
+ name: Install gpg secret key
+ run: |
+ # Install gpg secret key
+ cat <(echo -e "${{ secrets.OSSRH_GPG_SECRET_KEY }}") | gpg --batch --import
+ # Verify gpg secret key
+ gpg --list-secret-keys --keyid-format LONG
+
+ - name: Publish package
+ run: mvn --batch-mode deploy
+ env:
+ MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
+ MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}
\ No newline at end of file
From c5818cb161ad14bd09f5653e5d4ddd46d02a009d Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Mon, 24 Jun 2024 15:23:14 +0200
Subject: [PATCH 05/15] configure maven-gpg-plugin to prevent gpg from using
PIN entry programs
---
.github/workflows/deploy.yml | 9 +++++----
pom.xml | 7 ++++++-
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index efc02ba..b3965cf 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -12,11 +12,11 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '22'
+ java-version: '22' # or 11?
distribution: 'temurin'
server-id: ossrh
- server-username: MAVEN_USERNAME
- server-password: MAVEN_PASSWORD
+ server-username: ${{secrets.OSSRH_USERNAME}}
+ server-password: ${{secrets.OSSRH_TOKEN}}
- name: Download blosc jar
run: |
@@ -32,7 +32,8 @@ jobs:
gpg --list-secret-keys --keyid-format LONG
- name: Publish package
- run: mvn --batch-mode deploy
+ # when working, add --no-transfer-progress
+ run: mvn --batch-mode -Dgpg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} deploy
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index d5669c5..5ddd1d1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -113,7 +113,6 @@
-
unidata-all
@@ -200,6 +199,12 @@
org.apache.maven.plugins
maven-gpg-plugin
1.6
+          <configuration>
+            <gpgArguments>
+              <arg>--pinentry-mode</arg>
+              <arg>loopback</arg>
+            </gpgArguments>
+          </configuration>
sign-artifacts
From 9d37dd766556d99df9cd9bf206176ea25ec7aac0 Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:01:55 +0200
Subject: [PATCH 06/15] add blosc dependency from maven
---
.github/workflows/ci.yml | 5 -----
pom.xml | 6 ++++++
2 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index be4cfe8..093de40 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -38,11 +38,6 @@ jobs:
python -m venv venv_zarrita
if [ "${{ runner.os }}" = "Windows" ]; then venv_zarrita/Scripts/pip install zarrita; else venv_zarrita/bin/pip install zarrita; fi
- - name: Download blosc jar
- run: |
- mkdir -p ../blosc-java/target
- curl https://static.webknossos.org/misc/blosc-java-0.1-1.21.4-SNAPSHOT.jar -o ../blosc-java/target/blosc-java-0.1-1.21.4-SNAPSHOT.jar
-
- name: Download testdata
run: |
mkdir testoutput
diff --git a/pom.xml b/pom.xml
index 5ddd1d1..6c5bfa2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -104,6 +104,12 @@
4.13.1
test
+    <dependency>
+      <groupId>com.scalableminds</groupId>
+      <artifactId>blosc-java</artifactId>
+      <version>0.1-1.21.4</version>
+    </dependency>
+
From 6e3830a965f5ee4ccc0937cd7e71a11918a6f02d Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:19:44 +0200
Subject: [PATCH 07/15] incorporate compiler feedback to set release java
version 8
---
.github/workflows/ci.yml | 2 +-
pom.xml | 9 +--------
2 files changed, 2 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 093de40..eb7d84e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '22'
+ java-version: '8'
distribution: 'temurin'
cache: maven
diff --git a/pom.xml b/pom.xml
index 6c5bfa2..e0ab3d7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -104,12 +104,6 @@
4.13.1
test
-    <dependency>
-      <groupId>com.scalableminds</groupId>
-      <artifactId>blosc-java</artifactId>
-      <version>0.1-1.21.4</version>
-    </dependency>
-
@@ -142,8 +136,7 @@
maven-compiler-plugin
3.13.0
-
- ${maven.compiler.target}
+ 8
UTF-8
.gitignore
From f66cf2d051bf998124d1ef56e9b0e6588359ee81 Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:28:05 +0200
Subject: [PATCH 08/15] set java compile version 11
---
.github/workflows/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index eb7d84e..576d837 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '8'
+ java-version: '11'
distribution: 'temurin'
cache: maven
From aee2bfe2645b7330a43b97fced032a22efc426e6 Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:45:10 +0200
Subject: [PATCH 09/15] remove java 8 incompatible Map.of
---
pom.xml | 5 ++---
src/test/java/dev/zarr/zarrjava/ZarrTest.java | 12 +++++++++---
2 files changed, 11 insertions(+), 6 deletions(-)
diff --git a/pom.xml b/pom.xml
index e0ab3d7..d60e07d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,8 +30,7 @@
-    <maven.compiler.source>8</maven.compiler.source>
-    <maven.compiler.target>8</maven.compiler.target>
+    <maven.compiler.release>8</maven.compiler.release>
UTF-8
2.14.2
1.12.477
@@ -136,7 +135,7 @@
maven-compiler-plugin
3.13.0
-          <release>8</release>
+          <release>${maven.compiler.release}</release>
UTF-8
.gitignore
diff --git a/src/test/java/dev/zarr/zarrjava/ZarrTest.java b/src/test/java/dev/zarr/zarrjava/ZarrTest.java
index b9632be..c0e50ca 100644
--- a/src/test/java/dev/zarr/zarrjava/ZarrTest.java
+++ b/src/test/java/dev/zarr/zarrjava/ZarrTest.java
@@ -14,7 +14,6 @@
import dev.zarr.zarrjava.v3.*;
import dev.zarr.zarrjava.v3.codec.CodecBuilder;
import dev.zarr.zarrjava.v3.codec.core.TransposeCodec;
-import jdk.jshell.spi.ExecutionControl;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -30,6 +29,7 @@
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Comparator;
+import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;
@@ -150,13 +150,16 @@ public void testWriteReadWithZarrita(String codec, String codecParam) throws Exc
int[] testData = new int[16 * 16 * 16];
Arrays.setAll(testData, p -> p);
+ Map<String, Object> attributes = new HashMap<>();
+ attributes.put("test_key", "test_value");
+
StoreHandle storeHandle = new FilesystemStore(TESTOUTPUT).resolve("write_to_zarrita", codec, codecParam);
ArrayMetadataBuilder builder = Array.metadataBuilder()
.withShape(16, 16, 16)
.withDataType(DataType.UINT32)
.withChunkShape(2, 4, 8)
.withFillValue(0)
- .withAttributes(Map.of("test_key", "test_value"));
+ .withAttributes(attributes);
switch (codec) {
case "blosc":
@@ -425,8 +428,11 @@ public void testV3FillValue() throws ZarrException {
public void testV3Group() throws IOException, ZarrException {
FilesystemStore fsStore = new FilesystemStore(TESTOUTPUT);
+ Map<String, Object> attributes = new HashMap<>();
+ attributes.put("hello", "world");
+
Group group = Group.create(fsStore.resolve("testgroup"));
- Group group2 = group.createGroup("test2", Map.of("hello", "world"));
+ Group group2 = group.createGroup("test2", attributes);
Array array = group2.createArray("array", b ->
b.withShape(10, 10)
.withDataType(DataType.UINT8)
From e3ed428501bbec66b7b80d13931dd6b532fd3fdf Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:49:37 +0200
Subject: [PATCH 10/15] set java version 8
---
.github/workflows/ci.yml | 2 +-
.github/workflows/deploy.yml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 576d837..eb7d84e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '11'
+ java-version: '8'
distribution: 'temurin'
cache: maven
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index b3965cf..6055323 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -12,7 +12,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '22' # or 11?
+ java-version: '8'
distribution: 'temurin'
server-id: ossrh
server-username: ${{secrets.OSSRH_USERNAME}}
From 2ff00981939dfe60a4ae55103cba6b20f64dbccd Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:50:22 +0200
Subject: [PATCH 11/15] remove download blosc-java manually
---
.github/workflows/deploy.yml | 5 -----
1 file changed, 5 deletions(-)
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 6055323..b0ee259 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -18,11 +18,6 @@ jobs:
server-username: ${{secrets.OSSRH_USERNAME}}
server-password: ${{secrets.OSSRH_TOKEN}}
- - name: Download blosc jar
- run: |
- mkdir -p ../blosc-java/target
- curl https://static.webknossos.org/misc/blosc-java-0.1-1.21.4-SNAPSHOT.jar -o ../blosc-java/target/blosc-java-0.1-1.21.4-SNAPSHOT.jar
-
- id: install-secret-key
name: Install gpg secret key
run: |
From a44729e6a8dace8b48ff2562f4e73a6196519b2a Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Wed, 26 Jun 2024 11:55:18 +0200
Subject: [PATCH 12/15] compile from java 11 for release java 8
---
.github/workflows/ci.yml | 2 +-
.github/workflows/deploy.yml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index eb7d84e..576d837 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '8'
+ java-version: '11'
distribution: 'temurin'
cache: maven
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index b0ee259..552f071 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -12,7 +12,7 @@ jobs:
- name: Set up JDK
uses: actions/setup-java@v3
with:
- java-version: '8'
+ java-version: '11'
distribution: 'temurin'
server-id: ossrh
server-username: ${{secrets.OSSRH_USERNAME}}
From 22b55149cae1e15eb540a2ef1aa7abf28121d58e Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Thu, 4 Jul 2024 10:49:14 +0200
Subject: [PATCH 13/15] Add steps from ci.yml to deploy.yml
---
.github/workflows/deploy.yml | 23 ++++++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index f751354..28ffbc3 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -29,8 +29,29 @@ jobs:
# Verify gpg secret key
gpg --list-secret-keys --keyid-format LONG
+ # Begin copy from ci.yml. Refactor?
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+
+ - name: Install zarrita
+ run: |
+ python -m venv venv_zarrita
+ if [ "${{ runner.os }}" = "Windows" ]; then venv_zarrita/Scripts/pip install zarrita; else venv_zarrita/bin/pip install zarrita; fi
+
+ - name: Download testdata
+ run: |
+ mkdir testoutput
+ curl https://static.webknossos.org/data/zarr_v3/l4_sample.zip -o testdata/l4_sample.zip
+ cd testdata
+ unzip l4_sample.zip
+
+ # End copy from ci.yml
+
- name: Publish package
- run: mvn --batch-mode deploy
+ run: mvn --batch-mode deploy -DargLine="-Xmx6g"
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}
From 561c70bb8ffdd4b86b2265362051a55305b45b1c Mon Sep 17 00:00:00 2001
From: Josh Moore
Date: Thu, 4 Jul 2024 10:50:02 +0200
Subject: [PATCH 14/15] Bump to 0.0.2
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 60f07d4..79314c4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
  <groupId>dev.zarr</groupId>
  <artifactId>zarr-java</artifactId>
-  <version>0.0.1</version>
+  <version>0.0.2</version>
  <name>zarr-java</name>
From e4bff363dbe978c4e9c2ac0d1544cfcd3123545c Mon Sep 17 00:00:00 2001
From: brokkoli71
Date: Thu, 4 Jul 2024 11:05:49 +0200
Subject: [PATCH 15/15] remove duplicate entries
---
pom.xml | 21 ---------------------
1 file changed, 21 deletions(-)
diff --git a/pom.xml b/pom.xml
index 79314c4..7349e4a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -41,27 +41,6 @@
-  <name>Zarr-Java</name>
-  <description>
-    Zarr-Java is a Java library providing an implementation of chunked,
-    compressed, N-dimensional arrays implementing the ZarrV3 format.
-  </description>
-  <url>https://github.com/zarr-developers/zarr-java</url>
-
-  <licenses>
-    <license>
-      <name>MIT License</name>
-      <url>https://opensource.org/license/mit/</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-
-  <scm>
-    <url>https://github.com/zarr-developers/zarr-java</url>
-    <connection>scm:git:git://github.com/zarr-developers/zarr-java.git</connection>
-    <developerConnection>scm:git:git@github.com:zarr-developers/zarr-java.git</developerConnection>
-  </scm>
-
8
UTF-8