diff --git a/README.md b/README.md
index bc97e9b8..59b25df2 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ N5 group is not a single file but simply a directory on the file system.  Meta-d
 
 1. All directories of the file system are N5 groups.
 2. A JSON file `attributes.json` in a directory contains arbitrary attributes.  A group without attributes may not have an `attributes.json` file.
-3. The version of this specification is 1.0.0 and is stored in the "n5" attribute of the root group "/".
+3. The version of this specification is 4.0.0 and is stored in the "n5" attribute of the root group "/".
 4. A dataset is a group with the mandatory attributes:
    * dimensions (e.g. [100, 200, 300]),
    * blockSize (e.g. [64, 64, 64]),
@@ -38,7 +38,7 @@ N5 group is not a single file but simply a directory on the file system.  Meta-d
      * xz with parameters
        * preset (integer, default 6).
        
-   Custom compression schemes with arbitrary parameters can be added using [compression annotations](#extensible-compression-schemes), e.g. [N5 Blosc](https://github.com/saalfeldlab/n5-blosc).
+   Custom compression schemes with arbitrary parameters can be added using [compression annotations](#extensible-compression-schemes), e.g. [N5 Blosc](https://github.com/saalfeldlab/n5-blosc) and [N5 ZStandard](https://github.com/JaneliaSciComp/n5-zstandard/).  An example `attributes.json` is shown after this list.
 5. Chunks are stored in a directory hierarchy that enumerates their positive integer position in the chunk grid (e.g. `0/4/1/7` for chunk grid position p=(0, 4, 1, 7)).
 6. Datasets are sparse, i.e. there is no guarantee that all chunks of a dataset exist.
 7. Chunks cannot be larger than 2GB (2<sup>31</sup>Bytes).
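+
+For example (values are illustrative), the `attributes.json` of a gzip-compressed dataset could read:
+
+```json
+{
+  "dimensions": [100, 200, 300],
+  "blockSize": [64, 64, 64],
+  "dataType": "uint16",
+  "compression": {"type": "gzip", "level": -1}
+}
+```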
@@ -134,3 +134,4 @@ Custom compression schemes can be implemented using the annotation discovery mec
 HDF5 is a great format that provides a wealth of conveniences that I do not want to miss.  Its inefficiency for parallel writing, however, limits its applicability to handling very large n-dimensional data.
 
 N5 uses the native filesystem of the target platform and JSON files to specify basic and custom meta-data as attributes.  It aims at preserving the convenience of HDF5 where possible but doesn't try too hard to be a full replacement.
+Please do not take this project too seriously; we will see where it gets us and report back when more data is available.
diff --git a/pom.xml b/pom.xml
index 5567ec39..e38dc5d5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>org.scijava</groupId>
 		<artifactId>pom-scijava</artifactId>
-		<version>38.0.1</version>
+		<version>40.0.0</version>
 		<relativePath />
 	</parent>
 
@@ -161,6 +161,18 @@
 			<groupId>com.google.code.gson</groupId>
 			<artifactId>gson</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>org.scijava</groupId>
+			<artifactId>scijava-common</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-compress</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+		</dependency>
 
 		<!-- Test dependencies -->
 		<dependency>
@@ -168,6 +180,17 @@
 			<artifactId>junit</artifactId>
 			<scope>test</scope>
 		</dependency>
+		<dependency>
+			<groupId>org.janelia.saalfeldlab</groupId>
+			<artifactId>n5-universe</artifactId>
+			<exclusions>
+				<exclusion>
+					<groupId>org.janelia.saalfeldlab</groupId>
+					<artifactId>n5</artifactId>
+				</exclusion>
+			</exclusions>
+			<scope>test</scope>
+		</dependency>
 		<dependency>
 			<groupId>net.imagej</groupId>
 			<artifactId>ij</artifactId>
@@ -194,14 +217,6 @@
 			<version>${commons-collections4.version}</version>
 			<scope>test</scope>
 		</dependency>
-		<dependency>
-			<groupId>org.scijava</groupId>
-			<artifactId>scijava-common</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-compress</artifactId>
-		</dependency>
 	</dependencies>
 
 	<repositories>
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/AbstractDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/AbstractDataBlock.java
index f1cbc352..59208fcf 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/AbstractDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/AbstractDataBlock.java
@@ -25,6 +25,11 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
 /**
  * Abstract base class for {@link DataBlock} implementations.
  *
@@ -63,4 +68,20 @@ public T getData() {
 
 		return data;
 	}
+
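+	/**
+	 * Default implementation: read the block's serialized form into a
+	 * {@link ByteBuffer} of the expected size, then delegate to
+	 * {@link #readData(ByteBuffer)}.
+	 */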
+	@Override
+	public void readData(final DataInput input) throws IOException {
+
+		final ByteBuffer buffer = toByteBuffer();
+		input.readFully(buffer.array());
+		readData(buffer);
+	}
+
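+	/**
+	 * Default implementation: write this block's {@link ByteBuffer}
+	 * serialization to the output in a single call.
+	 */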
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		final ByteBuffer buffer = toByteBuffer();
+		output.write(buffer.array());
+	}
+
 }
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/BlockParameters.java b/src/main/java/org/janelia/saalfeldlab/n5/BlockParameters.java
new file mode 100644
index 00000000..65a21497
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/BlockParameters.java
@@ -0,0 +1,11 @@
+package org.janelia.saalfeldlab.n5;
+
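+/**
+ * Basic geometry of a blocked n-dimensional dataset: the overall dimensions
+ * and the size of a block.
+ */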
+public interface BlockParameters {
+
+	public long[] getDimensions();
+
+	public int getNumDimensions();
+
+	public int[] getBlockSize();
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/ByteArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/ByteArrayDataBlock.java
index 5717ad2e..4610811d 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/ByteArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/ByteArrayDataBlock.java
@@ -25,6 +25,8 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 public class ByteArrayDataBlock extends AbstractDataBlock<byte[]> {
@@ -47,6 +49,12 @@ public void readData(final ByteBuffer buffer) {
 			buffer.get(getData());
 	}
 
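+	/** Reads directly into the backing byte array, skipping the intermediate {@link ByteBuffer}. */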
+	@Override
+	public void readData(final DataInput inputStream) throws IOException {
+
+		inputStream.readFully(data);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/Bzip2Compression.java b/src/main/java/org/janelia/saalfeldlab/n5/Bzip2Compression.java
index 5d3d6161..49a333f3 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/Bzip2Compression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/Bzip2Compression.java
@@ -32,8 +32,10 @@
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
 import org.janelia.saalfeldlab.n5.Compression.CompressionType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
 
 @CompressionType("bzip2")
+@NameConfig.Name("bzip2")
 public class Bzip2Compression implements DefaultBlockReader, DefaultBlockWriter, Compression {
 
 	private static final long serialVersionUID = -4873117458390529118L;
@@ -52,17 +54,29 @@ public Bzip2Compression() {
 	}
 
 	@Override
-	public InputStream getInputStream(final InputStream in) throws IOException {
+	public InputStream decode(final InputStream in) throws IOException {
 
 		return new BZip2CompressorInputStream(in);
 	}
 
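+	// getInputStream/getOutputStream remain as the legacy Compression entry
+	// points and delegate to the new Codec decode/encode methods.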
 	@Override
-	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+	public InputStream getInputStream(final InputStream in) throws IOException {
+
+		return decode(in);
+	}
+
+	@Override
+	public OutputStream encode(final OutputStream out) throws IOException {
 
 		return new BZip2CompressorOutputStream(out, blockSize);
 	}
 
+	@Override
+	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+
+		return encode(out);
+	}
+
 	@Override
 	public Bzip2Compression getReader() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Reader.java b/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Reader.java
index 812c6ebb..324d242e 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Reader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Reader.java
@@ -27,7 +27,6 @@
 
 import java.lang.reflect.Type;
 
-import com.google.gson.JsonSyntaxException;
 import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
 import org.janelia.saalfeldlab.n5.cache.N5JsonCache;
 import org.janelia.saalfeldlab.n5.cache.N5JsonCacheableContainer;
@@ -35,6 +34,7 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
+import com.google.gson.JsonSyntaxException;
 
 /**
  * {@link N5Reader} implementation through {@link KeyValueAccess} with JSON
@@ -70,7 +70,7 @@ default DatasetAttributes getDatasetAttributes(final String pathName) {
 			return null;
 
 		if (cacheMeta()) {
-			attributes = getCache().getAttributes(normalPath, N5KeyValueReader.ATTRIBUTES_JSON);
+			attributes = getCache().getAttributes(normalPath, getAttributesKey());
 		} else {
 			attributes = GsonKeyValueN5Reader.super.getAttributes(normalPath);
 		}
@@ -96,7 +96,7 @@ default <T> T getAttribute(
 
 		final JsonElement attributes;
 		if (cacheMeta()) {
-			attributes = getCache().getAttributes(normalPathName, N5KeyValueReader.ATTRIBUTES_JSON);
+			attributes = getCache().getAttributes(normalPathName, getAttributesKey());
 		} else {
 			attributes = GsonKeyValueN5Reader.super.getAttributes(normalPathName);
 		}
@@ -117,7 +117,7 @@ default <T> T getAttribute(
 		final String normalizedAttributePath = N5URI.normalizeAttributePath(key);
 		JsonElement attributes;
 		if (cacheMeta()) {
-			attributes = getCache().getAttributes(normalPathName, N5KeyValueReader.ATTRIBUTES_JSON);
+			attributes = getCache().getAttributes(normalPathName, getAttributesKey());
 		} else {
 			attributes = GsonKeyValueN5Reader.super.getAttributes(normalPathName);
 		}
@@ -133,7 +133,7 @@ default boolean exists(final String pathName) {
 
 		final String normalPathName = N5URI.normalizeGroupPath(pathName);
 		if (cacheMeta())
-			return getCache().isGroup(normalPathName, N5KeyValueReader.ATTRIBUTES_JSON);
+			return getCache().isGroup(normalPathName, getAttributesKey());
 		else {
 			return existsFromContainer(normalPathName, null);
 		}
@@ -176,7 +176,7 @@ default boolean datasetExists(final String pathName) throws N5IOException {
 
 		final String normalPathName = N5URI.normalizeGroupPath(pathName);
 		if (cacheMeta()) {
-			return getCache().isDataset(normalPathName, N5KeyValueReader.ATTRIBUTES_JSON);
+			return getCache().isDataset(normalPathName, getAttributesKey());
 		}
 		return isDatasetFromContainer(normalPathName);
 	}
@@ -208,7 +208,7 @@ default JsonElement getAttributes(final String pathName) throws N5IOException {
 
 		/* If cached, return the cache */
 		if (cacheMeta()) {
-			return getCache().getAttributes(groupPath, N5KeyValueReader.ATTRIBUTES_JSON);
+			return getCache().getAttributes(groupPath, getAttributesKey());
 		} else {
 			return GsonKeyValueN5Reader.super.getAttributes(groupPath);
 		}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Writer.java b/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Writer.java
index d95f6345..9d509051 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Writer.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Writer.java
@@ -59,9 +59,9 @@ default void createGroup(final String path) throws N5Exception {
 		// else if exists is true (then a dataset is present) so throw an exception to avoid
 		// overwriting / invalidating existing data
 		if (cacheMeta()) {
-			if (getCache().isGroup(normalPath, N5KeyValueReader.ATTRIBUTES_JSON))
+			if (getCache().isGroup(normalPath, getAttributesKey()))
 				return;
-			else if (getCache().exists(normalPath, N5KeyValueReader.ATTRIBUTES_JSON)) {
+			else if (getCache().exists(normalPath, getAttributesKey())) {
 				throw new N5Exception("Can't make a group on existing path.");
 			}
 		}
@@ -88,8 +88,8 @@ else if (getCache().exists(normalPath, N5KeyValueReader.ATTRIBUTES_JSON)) {
 			for (final String child : pathParts) {
 
 				final String childPath = parent.isEmpty() ? child : parent + "/" + child;
-				getCache().initializeNonemptyCache(childPath, N5KeyValueReader.ATTRIBUTES_JSON);
-				getCache().updateCacheInfo(childPath, N5KeyValueReader.ATTRIBUTES_JSON);
+				getCache().initializeNonemptyCache(childPath, getAttributesKey());
+				getCache().updateCacheInfo(childPath, getAttributesKey());
 
 				// only add if the parent exists and has children cached already
 				if (parent != null && !child.isEmpty())
@@ -130,7 +130,7 @@ default void writeAndCacheAttributes(
 				nullRespectingAttributes = getGson().toJsonTree(attributes);
 			}
 			/* Update the cache, and write to the writer */
-			getCache().updateCacheInfo(normalGroupPath, N5KeyValueReader.ATTRIBUTES_JSON, nullRespectingAttributes);
+			getCache().updateCacheInfo(normalGroupPath, getAttributesKey(), nullRespectingAttributes);
 		}
 	}
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/Compression.java b/src/main/java/org/janelia/saalfeldlab/n5/Compression.java
index df0ca49e..2e8b9cdf 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/Compression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/Compression.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.io.Serializable;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Inherited;
@@ -32,14 +35,19 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
+import org.janelia.saalfeldlab.n5.codec.Codec;
 import org.scijava.annotations.Indexable;
 
 /**
+ * Deprecated: {@link Compression}s are no longer a special case;
+ * use {@link Codec.BytesCodec} to implement compressors.
+ * <p>
  * Compression scheme interface.
  *
  * @author Stephan Saalfeld
  */
-public interface Compression extends Serializable {
+public interface Compression extends Serializable, Codec.BytesCodec {
 
 	/**
 	 * Annotation for runtime discovery of compression schemes.
@@ -49,7 +57,7 @@ public interface Compression extends Serializable {
 	@Inherited
 	@Target(ElementType.TYPE)
 	@Indexable
-	public static @interface CompressionType {
+	@interface CompressionType {
 
 		String value();
 	}
@@ -61,9 +69,10 @@ public interface Compression extends Serializable {
 	@Retention(RetentionPolicy.RUNTIME)
 	@Inherited
 	@Target(ElementType.FIELD)
-	public static @interface CompressionParameter {}
+	@interface CompressionParameter {}
 
-	public default String getType() {
+	@Override
+	default String getType() {
 
 		final CompressionType compressionType = getClass().getAnnotation(CompressionType.class);
 		if (compressionType == null)
@@ -72,7 +81,29 @@ public default String getType() {
 			return compressionType.value();
 	}
 
-	public BlockReader getReader();
 
-	public BlockWriter getWriter();
+	BlockReader getReader();
+
+	BlockWriter getWriter();
+
+	/**
+	 * Decode an {@link InputStream}.
+	 *
+	 * @param in
+	 *            input stream
+	 * @return the decoded input stream
+	 * @throws IOException
+	 *             if an I/O error occurs while setting up decoding
+	 */
+	@Override
+	InputStream decode(InputStream in) throws IOException;
+
+	/**
+	 * Encode an {@link OutputStream}.
+	 *
+	 * @param out
+	 *            the output stream
+	 * @return the encoded output stream
+	 * @throws IOException
+	 *             if an I/O error occurs while setting up encoding
+	 */
+	@Override
+	OutputStream encode(OutputStream out) throws IOException;
+
 }
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/DataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/DataBlock.java
index 3d9dc92a..5ccdbbaf 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/DataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/DataBlock.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
@@ -94,6 +97,10 @@ public interface DataBlock<T> {
 	 */
 	public void readData(final ByteBuffer buffer);
 
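+	/**
+	 * Reads this block's data from a {@link DataInput}.
+	 *
+	 * @param inputStream
+	 *            the input to read from
+	 * @throws IOException
+	 *             if reading fails
+	 */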
+	public void readData(final DataInput inputStream) throws IOException;
+
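+	/**
+	 * Writes this block's data to a {@link DataOutput}.
+	 *
+	 * @param output
+	 *            the output to write to
+	 * @throws IOException
+	 *             if writing fails
+	 */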
+	public void writeData(final DataOutput output) throws IOException;
+
 	/**
 	 * Returns the number of elements in this {@link DataBlock}. This number is
 	 * not necessarily equal {@link #getNumElements(int[])
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/DatasetAttributes.java b/src/main/java/org/janelia/saalfeldlab/n5/DatasetAttributes.java
index f4aea9fe..d2cfa4a5 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/DatasetAttributes.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/DatasetAttributes.java
@@ -1,33 +1,27 @@
-/**
- * Copyright (c) 2017, Stephan Saalfeld
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- *    this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- *    this list of conditions and the following disclaimer in the documentation
- *    and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
 package org.janelia.saalfeldlab.n5;
 
 import java.io.Serializable;
+import java.lang.reflect.Type;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.stream.Stream;
+
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.Codec.ArrayCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.BytesCodec;
+import org.janelia.saalfeldlab.n5.codec.N5BlockCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardParameters;
+
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
 
 /**
  * Mandatory dataset attributes:
@@ -36,20 +30,26 @@
  * <li>long[] : dimensions</li>
  * <li>int[] : blockSize</li>
  * <li>{@link DataType} : dataType</li>
- * <li>{@link Compression} : compression</li>
+ * <li>{@link Codec}... : encode/decode routines</li>
  * </ol>
  *
  * @author Stephan Saalfeld
- *
  */
-public class DatasetAttributes implements Serializable {
+//TODO Caleb: try to delete ShardParameters?
+public class DatasetAttributes implements BlockParameters, ShardParameters, Serializable {
 
 	private static final long serialVersionUID = -4521467080388947553L;
 
 	public static final String DIMENSIONS_KEY = "dimensions";
 	public static final String BLOCK_SIZE_KEY = "blockSize";
+	public static final String SHARD_SIZE_KEY = "shardSize";
 	public static final String DATA_TYPE_KEY = "dataType";
 	public static final String COMPRESSION_KEY = "compression";
+	public static final String CODEC_KEY = "codecs";
+
+	public static final String[] N5_DATASET_ATTRIBUTES = new String[]{
+			DIMENSIONS_KEY, BLOCK_SIZE_KEY, DATA_TYPE_KEY, COMPRESSION_KEY, CODEC_KEY
+	};
 
 	/* version 0 */
 	protected static final String compressionTypeKey = "compressionType";
@@ -57,38 +57,132 @@ public class DatasetAttributes implements Serializable {
 	private final long[] dimensions;
 	private final int[] blockSize;
 	private final DataType dataType;
-	private final Compression compression;
+	private final ArrayCodec arrayCodec;
+	private final BytesCodec[] byteCodecs;
+	@Nullable private final int[] shardSize;
 
+	/**
+	 * Constructs a DatasetAttributes instance with the specified dimensions, optional shard size,
+	 * block size, data type, and array of codecs.
+	 *
+	 * @param dimensions the dimensions of the dataset
+	 * @param shardSize  the size of the shards in the dataset, or null for an unsharded dataset
+	 * @param blockSize  the size of the blocks in the dataset
+	 * @param dataType   the data type of the dataset
+	 * @param codecs     the codecs used to encode/decode the data
+	 */
 	public DatasetAttributes(
 			final long[] dimensions,
+			@Nullable final int[] shardSize,
 			final int[] blockSize,
 			final DataType dataType,
-			final Compression compression) {
+			final Codec... codecs) {
 
 		this.dimensions = dimensions;
+		this.shardSize = shardSize;
 		this.blockSize = blockSize;
 		this.dataType = dataType;
-		this.compression = compression;
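+		// Normalize codecs: exactly one ArrayCodec must come first (defaulting to
+		// the legacy N5 block format); the remaining BytesCodecs apply in order,
+		// and the no-op RawCompression is dropped.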
+		if (codecs == null || codecs.length == 0) {
+			byteCodecs = new BytesCodec[]{};
+			arrayCodec = new N5BlockCodec();
+		} else if (codecs.length == 1 && codecs[0] instanceof Compression) {
+			final BytesCodec compression = (BytesCodec)codecs[0];
+			byteCodecs = compression instanceof RawCompression ? new BytesCodec[]{} : new BytesCodec[]{compression};
+			arrayCodec = new N5BlockCodec();
+		} else {
+			if (!(codecs[0] instanceof ArrayCodec))
+				throw new N5Exception("Expected first element of codecs to be ArrayCodec, but was: " + codecs[0].getClass());
+
+			if (Arrays.stream(codecs).filter(c -> c instanceof ArrayCodec).count() > 1)
+				throw new N5Exception("Multiple ArrayCodecs found. Only one is allowed.");
+
+			arrayCodec = (ArrayCodec)codecs[0];
+			byteCodecs = Stream.of(codecs)
+					.skip(1)
+					.filter(c -> !(c instanceof RawCompression))
+					.filter(c -> c instanceof BytesCodec)
+					.toArray(BytesCodec[]::new);
+		}
 	}
 
+	/**
+	 * Constructs a DatasetAttributes instance with specified dimensions, block size, data type,
+	 * and array of codecs.
+	 *
+	 * @param dimensions the dimensions of the dataset
+	 * @param blockSize  the size of the blocks in the dataset
+	 * @param dataType   the data type of the dataset
+	 * @param codecs     the codecs used to encode/decode the data
+	 */
+	public DatasetAttributes(
+			final long[] dimensions,
+			final int[] blockSize,
+			final DataType dataType,
+			final Codec... codecs) {
+		this(dimensions, null, blockSize, dataType, codecs);
+	}
+
+	/**
+	 * Deprecated: {@link Compression}s are {@link Codec}s; use the {@code Codec...} constructor instead.
+	 * Constructs a DatasetAttributes instance with specified dimensions, block size, data type,
+	 * and compression scheme. This constructor is deprecated and redirects to another constructor
+	 * with codec support.
+	 *
+	 * @param dimensions  the dimensions of the dataset
+	 * @param blockSize   the size of the blocks in the dataset
+	 * @param dataType    the data type of the dataset
+	 * @param compression the compression scheme used for storing the dataset
+	 */
+	@Deprecated
+	public DatasetAttributes(
+			final long[] dimensions,
+			final int[] blockSize,
+			final DataType dataType,
+			final Compression compression) {
+
+		this(dimensions, blockSize, dataType, (Codec)compression);
+	}
+
+	@Override
 	public long[] getDimensions() {
 
 		return dimensions;
 	}
 
+	@Override
 	public int getNumDimensions() {
 
 		return dimensions.length;
 	}
 
+	@Override
+	@CheckForNull
+	public int[] getShardSize() {
+
+		return shardSize;
+	}
+
+	@Override
 	public int[] getBlockSize() {
 
 		return blockSize;
 	}
 
+	/**
+	 * Deprecated: {@link Compression} is no longer a special case; prefer {@link #getCodecs()}.
+	 * Returns {@link RawCompression} if no other compression is provided, for legacy compatibility.
+	 *
+	 * @return the compression codec, if one is present
+	 */
+	@Deprecated
 	public Compression getCompression() {
 
-		return compression;
+		return Arrays.stream(byteCodecs)
+				.filter(it -> it instanceof Compression)
+				.map(it -> (Compression)it)
+				.findFirst()
+				.orElse(new RawCompression());
 	}
 
 	public DataType getDataType() {
@@ -96,47 +190,136 @@ public DataType getDataType() {
 		return dataType;
 	}
 
+	public ArrayCodec getArrayCodec() {
+
+		return arrayCodec;
+	}
+
+	public BytesCodec[] getCodecs() {
+
+		return byteCodecs;
+	}
+
+	/**
+	 * Deprecated in favor of {@link DatasetAttributesAdapter} for serialization.
+	 *
+	 * @return serializable properties of {@link DatasetAttributes}
+	 */
+	@Deprecated
 	public HashMap<String, Object> asMap() {
 
 		final HashMap<String, Object> map = new HashMap<>();
 		map.put(DIMENSIONS_KEY, dimensions);
 		map.put(BLOCK_SIZE_KEY, blockSize);
 		map.put(DATA_TYPE_KEY, dataType);
-		map.put(COMPRESSION_KEY, compression);
+		map.put(COMPRESSION_KEY, getCompression());
 		return map;
 	}
 
-	static DatasetAttributes from(
-			final long[] dimensions,
-			final DataType dataType,
-			int[] blockSize,
-			Compression compression,
-			final String compressionVersion0Name) {
 
-		if (blockSize == null)
-			blockSize = Arrays.stream(dimensions).mapToInt(a -> (int)a).toArray();
+	protected Codec[] concatenateCodecs() {
+
+		final Codec[] allCodecs = new Codec[byteCodecs.length + 1];
+		allCodecs[0] = arrayCodec;
+		for (int i = 0; i < byteCodecs.length; i++)
+			allCodecs[i + 1] = byteCodecs[i];
+
+		return allCodecs;
+	}
+
+	private static DatasetAttributesAdapter adapter = null;
+
+	public static DatasetAttributesAdapter getJsonAdapter() {
+		if (adapter == null) {
+			adapter = new DatasetAttributesAdapter();
+		}
+		return adapter;
+	}
+
+	public static class InvalidN5DatasetException extends N5Exception {
+
+		public InvalidN5DatasetException(String dataset, String reason, Throwable cause) {
+
+			this(String.format("Invalid dataset %s: %s", dataset, reason), cause);
+		}
+
+		public InvalidN5DatasetException(String message, Throwable cause) {
+
+			super(message, cause);
+		}
+	}
+
+	public static class DatasetAttributesAdapter implements JsonSerializer<DatasetAttributes>, JsonDeserializer<DatasetAttributes> {
+
+		@Override public DatasetAttributes deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
+
+			if (json == null || !json.isJsonObject()) return null;
+			final JsonObject obj = json.getAsJsonObject();
+			final boolean validKeySet = obj.has(DIMENSIONS_KEY)
+					&& obj.has(BLOCK_SIZE_KEY)
+					&& obj.has(DATA_TYPE_KEY)
+					&& (obj.has(CODEC_KEY) || obj.has(COMPRESSION_KEY) || obj.has(compressionTypeKey));
+
+			if (!validKeySet)
+				return null;
+
+			final long[] dimensions = context.deserialize(obj.get(DIMENSIONS_KEY), long[].class);
+			final int[] blockSize = context.deserialize(obj.get(BLOCK_SIZE_KEY), int[].class);
+
+			int[] shardSize = null;
+			if (obj.has(SHARD_SIZE_KEY))
+				shardSize = context.deserialize(obj.get(SHARD_SIZE_KEY), int[].class);
+
+			final DataType dataType = context.deserialize(obj.get(DATA_TYPE_KEY), DataType.class);
+
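+			// Resolve codecs, newest format first: the "codecs" array, then the
+			// "compression" object, then the version-0 "compressionType" string.
+			// For int8/uint8 the block codec gets a null byte order, since
+			// single-byte types have none.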
+			final Codec[] codecs;
+			if (obj.has(CODEC_KEY)) {
+				codecs = context.deserialize(obj.get(CODEC_KEY), Codec[].class);
+			} else if (obj.has(COMPRESSION_KEY)) {
+				final Compression compression = CompressionAdapter.getJsonAdapter().deserialize(obj.get(COMPRESSION_KEY), Compression.class, context);
+				final N5BlockCodec n5BlockCodec = dataType == DataType.UINT8 || dataType == DataType.INT8 ? new N5BlockCodec(null) : new N5BlockCodec();
+				codecs = new Codec[]{compression, n5BlockCodec};
+			} else if (obj.has(compressionTypeKey)) {
+				final Compression compression = getCompressionVersion0(obj.get(compressionTypeKey).getAsString());
+				final N5BlockCodec n5BlockCodec = dataType == DataType.UINT8 || dataType == DataType.INT8 ? new N5BlockCodec(null) : new N5BlockCodec();
+				codecs = new Codec[]{compression, n5BlockCodec};
+			} else {
+				return null;
+			}
+			return new DatasetAttributes(dimensions, shardSize, blockSize, dataType, codecs);
+		}
+
+		@Override public JsonElement serialize(DatasetAttributes src, Type typeOfSrc, JsonSerializationContext context) {
+
+			final JsonObject obj = new JsonObject();
+			obj.add(DIMENSIONS_KEY, context.serialize(src.dimensions));
+			obj.add(BLOCK_SIZE_KEY, context.serialize(src.blockSize));
+
+			//TODO Caleb: Type Hierarchy Adapter for extensions?
+			final int[] shardSize = src.getShardSize();
+			if (shardSize != null) {
+				obj.add(SHARD_SIZE_KEY, context.serialize(shardSize));
+			}
+
+			obj.add(DATA_TYPE_KEY, context.serialize(src.dataType));
+			obj.add(CODEC_KEY, context.serialize(src.concatenateCodecs()));
+
+			return obj;
+		}
+
+		private static Compression getCompressionVersion0(final String compressionVersion0Name) {
 
-		/* version 0 */
-		if (compression == null) {
 			switch (compressionVersion0Name) {
 			case "raw":
-				compression = new RawCompression();
-				break;
+				return new RawCompression();
 			case "gzip":
-				compression = new GzipCompression();
-				break;
+				return new GzipCompression();
 			case "bzip2":
-				compression = new Bzip2Compression();
-				break;
+				return new Bzip2Compression();
 			case "lz4":
-				compression = new Lz4Compression();
-				break;
+				return new Lz4Compression();
 			case "xz":
-				compression = new XzCompression();
-				break;
+				return new XzCompression();
 			}
+			return null;
 		}
-
-		return new DatasetAttributes(dimensions, blockSize, dataType, compression);
 	}
 }
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockReader.java b/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockReader.java
index 58c59780..0a49fc19 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockReader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockReader.java
@@ -30,6 +30,10 @@
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 
+import org.janelia.saalfeldlab.n5.codec.Codec.ArrayCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.BytesCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.DataBlockInputStream;
+
 /**
  * Default implementation of {@link BlockReader}.
  *
@@ -45,12 +49,9 @@ public default <T, B extends DataBlock<T>> void read(
 			final B dataBlock,
 			final InputStream in) throws IOException {
 
-		final ByteBuffer buffer = dataBlock.toByteBuffer();
-		try (final InputStream inflater = getInputStream(in)) {
-			final DataInputStream dis = new DataInputStream(inflater);
-			dis.readFully(buffer.array());
-		}
-		dataBlock.readData(buffer);
+		// do not use try-with-resources on this input stream: subsequent block
+		// reads may happen on it if the stream points into a shard
+		final InputStream inflater = getInputStream(in);
+		readFromStream(dataBlock, inflater);
 	}
 
 	/**
@@ -71,28 +72,28 @@ public static DataBlock<?> readBlock(
 			final DatasetAttributes datasetAttributes,
 			final long[] gridPosition) throws IOException {
 
-		final DataInputStream dis = new DataInputStream(in);
-		final short mode = dis.readShort();
-		final int numElements;
-		final DataBlock<?> dataBlock;
-		if (mode != 2) {
-			final int nDim = dis.readShort();
-			final int[] blockSize = new int[nDim];
-			for (int d = 0; d < nDim; ++d)
-				blockSize[d] = dis.readInt();
-			if (mode == 0) {
-				numElements = DataBlock.getNumElements(blockSize);
-			} else {
-				numElements = dis.readInt();
-			}
-			dataBlock = datasetAttributes.getDataType().createDataBlock(blockSize, gridPosition, numElements);
-		} else {
-			numElements = dis.readInt();
-			dataBlock = datasetAttributes.getDataType().createDataBlock(null, gridPosition, numElements);
+		final BytesCodec[] codecs = datasetAttributes.getCodecs();
+		final ArrayCodec arrayCodec = datasetAttributes.getArrayCodec();
+		final DataBlockInputStream dataBlockStream = arrayCodec.decode(datasetAttributes, gridPosition, in);
+
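+		// wrap the raw block stream with each BytesCodec decoder, in declaration order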
+		InputStream stream = dataBlockStream;
+		for (final BytesCodec codec : codecs) {
+			stream = codec.decode(stream);
 		}
 
-		final BlockReader reader = datasetAttributes.getCompression().getReader();
-		reader.read(dataBlock, in);
+		final DataBlock<?> dataBlock = dataBlockStream.allocateDataBlock();
+		dataBlock.readData(dataBlockStream.getDataInput(stream));
+		stream.close();
+
 		return dataBlock;
 	}
+
+	public static <T, B extends DataBlock<T>> void readFromStream(final B dataBlock, final InputStream in) throws IOException {
+
+		final ByteBuffer buffer = dataBlock.toByteBuffer();
+		final DataInputStream dis = new DataInputStream(in);
+		dis.readFully(buffer.array());
+		dataBlock.readData(buffer);
+	}
+
 }
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockWriter.java b/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockWriter.java
index c53aae2d..5f27c930 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockWriter.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/DefaultBlockWriter.java
@@ -25,11 +25,16 @@
  */
 package org.janelia.saalfeldlab.n5;
 
-import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
+import org.janelia.saalfeldlab.n5.codec.Codec.ArrayCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.BytesCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.DataBlockOutputStream;
+
+import static org.janelia.saalfeldlab.n5.codec.Codec.encode;
+
 /**
  * Default implementation of {@link BlockWriter}.
  *
@@ -70,29 +75,19 @@ public static <T> void writeBlock(
 			final DatasetAttributes datasetAttributes,
 			final DataBlock<T> dataBlock) throws IOException {
 
-		final DataOutputStream dos = new DataOutputStream(out);
-
-		final int mode;
-		if (datasetAttributes.getDataType() == DataType.OBJECT || dataBlock.getSize() == null)
-			mode = 2;
-		else if (dataBlock.getNumElements() == DataBlock.getNumElements(dataBlock.getSize()))
-			mode = 0;
-		else
-			mode = 1;
-		dos.writeShort(mode);
+		final BytesCodec[] codecs = datasetAttributes.getCodecs();
+		final ArrayCodec arrayCodec = datasetAttributes.getArrayCodec();
+		final DataBlockOutputStream dataBlockOutput = arrayCodec.encode(datasetAttributes, dataBlock, out);
 
-		if (mode != 2) {
-			dos.writeShort(datasetAttributes.getNumDimensions());
-			for (final int size : dataBlock.getSize())
-				dos.writeInt(size);
-		}
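+		// chain the BytesCodec encoders onto the array-codec output stream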
+		OutputStream stream = encode(dataBlockOutput, codecs);
 
-		if (mode != 0)
-			dos.writeInt(dataBlock.getNumElements());
+		dataBlock.writeData(dataBlockOutput.getDataOutput(stream));
+		stream.close();
+	}
 
-		dos.flush();
+	public static <T> void writeFromStream(final DataBlock<T> dataBlock, final OutputStream out) throws IOException {
 
-		final BlockWriter writer = datasetAttributes.getCompression().getWriter();
-		writer.write(dataBlock, out);
+		final ByteBuffer buffer = dataBlock.toByteBuffer();
+		out.write(buffer.array());
 	}
 }
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/DoubleArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/DoubleArrayDataBlock.java
index 426c7944..0240e6fa 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/DoubleArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/DoubleArrayDataBlock.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 public class DoubleArrayDataBlock extends AbstractDataBlock<double[]> {
@@ -48,6 +51,20 @@ public void readData(final ByteBuffer buffer) {
 		buffer.asDoubleBuffer().get(data);
 	}
 
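+	// DataInput/DataOutput offer no bulk transfer for double[], so elements are
+	// copied one at a time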
+	@Override
+	public void readData(final DataInput inputStream) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			data[i] = inputStream.readDouble();
+	}
+
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			output.writeDouble(data[i]);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccess.java b/src/main/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccess.java
index cfb45592..05afb2d1 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccess.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccess.java
@@ -25,6 +25,8 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import org.apache.commons.io.input.BoundedInputStream;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -71,13 +73,35 @@ protected class LockedFileChannel implements LockedChannel {
 
 		protected final FileChannel channel;
 
+		protected final boolean truncate;
+
+		protected long len;
+
 		protected LockedFileChannel(final String path, final boolean readOnly) throws IOException {
 
-			this(fileSystem.getPath(path), readOnly);
+			this(fileSystem.getPath(path), readOnly, 0, Long.MAX_VALUE);
+		}
+
+		protected LockedFileChannel(final String path, final boolean readOnly, final long startByte, final long size) throws IOException {
+
+			this(fileSystem.getPath(path), readOnly, startByte, size);
 		}
 
 		protected LockedFileChannel(final Path path, final boolean readOnly) throws IOException {
 
+			this(path, readOnly, 0, Long.MAX_VALUE);
+		}
+
+		protected LockedFileChannel(final Path path, final boolean readOnly, final long startByte, final long size)
+				throws IOException {
+
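+			// A negative start or size means "from the beginning" / "unbounded";
+			// only a whole-file lock may truncate on write.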
+			final long start = startByte < 0 ? 0L : startByte;
+			len = size < 0 ? Long.MAX_VALUE : size;
+
+			//TODO Caleb: How does this handle manually overwriting the entire file (e.g. len > file size)?
+			truncate = (start == 0 && len == Long.MAX_VALUE);
+
 			final OpenOption[] options;
 			if (readOnly) {
 				options = new OpenOption[]{StandardOpenOption.READ};
@@ -96,10 +120,13 @@ protected LockedFileChannel(final Path path, final boolean readOnly) throws IOEx
 				}
 			}
 
+			if (startByte != 0)
+				channel.position(start);
+
 			for (boolean waiting = true; waiting;) {
 				waiting = false;
 				try {
-					channel.lock(0L, Long.MAX_VALUE, readOnly);
+					channel.lock(start, len, readOnly);
 				} catch (final OverlappingFileLockException e) {
 					waiting = true;
 					try {
@@ -112,6 +139,12 @@ protected LockedFileChannel(final Path path, final boolean readOnly) throws IOEx
 			}
 		}
 
+		@Override
+		public long size() throws IOException {
+
+			return channel.size();
+		}
+
 		@Override
 		public Reader newReader() throws IOException {
 
@@ -121,20 +154,24 @@ public Reader newReader() throws IOException {
 		@Override
 		public Writer newWriter() throws IOException {
 
-			channel.truncate(0);
+			if (truncate)
+				channel.truncate(0);
+
 			return Channels.newWriter(channel, StandardCharsets.UTF_8.name());
 		}
 
 		@Override
 		public InputStream newInputStream() throws IOException {
 
-			return Channels.newInputStream(channel);
+			return BoundedInputStream.builder().setInputStream(Channels.newInputStream(channel)).setMaxCount(len).get();
 		}
 
 		@Override
 		public OutputStream newOutputStream() throws IOException {
 
-			channel.truncate(0);
+			if (truncate)
+				channel.truncate(0);
+
 			return Channels.newOutputStream(channel);
 		}
 
@@ -162,7 +199,18 @@ public LockedFileChannel lockForReading(final String normalPath) throws IOExcept
 
 		try {
 			return new LockedFileChannel(normalPath, true);
-		} catch (NoSuchFileException e) {
+		} catch (final NoSuchFileException e) {
+			throw new N5Exception.N5NoSuchKeyException("No such file", e);
+		}
+	}
+
+	@Override
+	public LockedFileChannel lockForReading(final String normalPath, final long startByte, final long size)
+			throws IOException {
+
+		try {
+			return new LockedFileChannel(normalPath, true, startByte, size);
+		} catch (final NoSuchFileException e) {
 			throw new N5Exception.N5NoSuchKeyException("No such file", e);
 		}
 	}
@@ -173,11 +221,18 @@ public LockedFileChannel lockForWriting(final String normalPath) throws IOExcept
 		return new LockedFileChannel(normalPath, false);
 	}
 
+	@Override
+	public LockedFileChannel lockForWriting(final String normalPath, final long startByte, final long size)
+			throws IOException {
+
+		return new LockedFileChannel(normalPath, false, startByte, size);
+	}
+
 	public LockedFileChannel lockForReading(final Path path) throws IOException {
 
 		try {
 			return new LockedFileChannel(path, true);
-		} catch (NoSuchFileException e) {
+		} catch (final NoSuchFileException e) {
 			throw new N5Exception.N5NoSuchKeyException("No such file", e);
 		}
 	}
@@ -208,6 +263,16 @@ public boolean exists(final String normalPath) {
 		return Files.exists(path);
 	}
 
+	@Override
+	public long size(final String normalPath) throws IOException {
+
+		try {
+			return Files.size(fileSystem.getPath(normalPath));
+		} catch (NoSuchFileException e) {
+			throw new N5Exception.N5NoSuchKeyException("No such file", e);
+		}
+	}
+
 	@Override
 	public String[] listDirectories(final String normalPath) throws IOException {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/FloatArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/FloatArrayDataBlock.java
index b8d30999..a2bc2c69 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/FloatArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/FloatArrayDataBlock.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 public class FloatArrayDataBlock extends AbstractDataBlock<float[]> {
@@ -48,6 +51,20 @@ public void readData(final ByteBuffer buffer) {
 		buffer.asFloatBuffer().get(data);
 	}
 
+	@Override
+	public void readData(final DataInput inputStream) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			data[i] = inputStream.readFloat();
+	}
+
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			output.writeFloat(data[i]);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Reader.java b/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Reader.java
index 4e728eb1..78e10811 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Reader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Reader.java
@@ -25,14 +25,19 @@
  */
 package org.janelia.saalfeldlab.n5;
 
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.Arrays;
-
-import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
-
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
+import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
+import org.janelia.saalfeldlab.n5.shard.Shard;
+import org.janelia.saalfeldlab.n5.shard.VirtualShard;
+import org.janelia.saalfeldlab.n5.util.Position;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 
 /**
  * {@link N5Reader} implementation through {@link KeyValueAccess} with JSON
@@ -86,23 +91,56 @@ default JsonElement getAttributes(final String pathName) throws N5Exception {
 
 	}
 
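+	/**
+	 * Returns a {@link VirtualShard} view of the shard at the given grid position;
+	 * its blocks are read on demand from the underlying key-value store.
+	 */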
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	default <T> Shard<T> readShard(
+			final String keyPath,
+			final DatasetAttributes datasetAttributes,
+			long... shardGridPosition) {
+
+		final String path = absoluteDataBlockPath(N5URI.normalizeGroupPath(keyPath), shardGridPosition);
+		return new VirtualShard<>(datasetAttributes, shardGridPosition, getKeyValueAccess(), path);
+	}
+
 	@Override
-	default DataBlock<?> readBlock(
+	default <T> DataBlock<T> readBlock(
 			final String pathName,
 			final DatasetAttributes datasetAttributes,
 			final long... gridPosition) throws N5Exception {
 
-		final String path = absoluteDataBlockPath(N5URI.normalizeGroupPath(pathName), gridPosition);
+		final long[] keyPos = datasetAttributes.getArrayCodec().getPositionForBlock(datasetAttributes, gridPosition);
+		final String keyPath = absoluteDataBlockPath(N5URI.normalizeGroupPath(pathName), keyPos);
 
-		try (final LockedChannel lockedChannel = getKeyValueAccess().lockForReading(path)) {
-			return DefaultBlockReader.readBlock(lockedChannel.newInputStream(), datasetAttributes, gridPosition);
-		} catch (final N5Exception.N5NoSuchKeyException e) {
-			return null;
-		} catch (final IOException | UncheckedIOException e) {
-			throw new N5IOException(
-					"Failed to read block " + Arrays.toString(gridPosition) + " from dataset " + path,
-					e);
+		return datasetAttributes.getArrayCodec().readBlock(
+				getKeyValueAccess(),
+				keyPath,
+				datasetAttributes,
+				gridPosition
+		);
+	}
+
+	@Override
+	default <T> List<DataBlock<T>> readBlocks(
+			final String pathName,
+			final DatasetAttributes datasetAttributes,
+			final List<long[]> blockPositions) throws N5Exception {
+
+		// TODO which interface should have this implementation?
+		if (datasetAttributes.getShardSize() != null) {
+			/* Group by shard position */
+			final Map<Position, List<long[]>> shardBlockMap = datasetAttributes.groupBlockPositions(blockPositions);
+			final ArrayList<DataBlock<T>> blocks = new ArrayList<>();
+			for (final Entry<Position, List<long[]>> e : shardBlockMap.entrySet()) {
+
+				final Shard<T> shard = readShard(pathName, datasetAttributes, e.getKey().get());
+
+				for (final long[] blkPosition : e.getValue()) {
+					blocks.add(shard.getBlock(blkPosition));
+				}
+			}
+
+			return blocks;
 		}
+		return GsonN5Reader.super.readBlocks(pathName, datasetAttributes, blockPositions);
 	}
 
 	@Override
@@ -118,6 +156,9 @@ default String[] list(final String pathName) throws N5Exception {
 	/**
 	 * Constructs the path for a data block in a dataset at a given grid
 	 * position.
+	 * <br>
+	 * If the gridPosition passed in refers to shard position
+	 * in a sharded dataset, this will return the path to the shard key
 	 * <p>
 	 * The returned path is
 	 *
@@ -145,6 +186,8 @@ default String absoluteDataBlockPath(
 		return getKeyValueAccess().compose(getURI(), components);
 	}
 
+
+
 	/**
 	 * Constructs the absolute path (in terms of this store) for the group or
 	 * dataset.
@@ -168,6 +211,6 @@ default String absoluteGroupPath(final String normalGroupPath) {
 	 */
 	default String absoluteAttributesPath(final String normalPath) {
 
-		return getKeyValueAccess().compose(getURI(), normalPath, N5KeyValueReader.ATTRIBUTES_JSON);
+		return getKeyValueAccess().compose(getURI(), normalPath, getAttributesKey());
 	}
 }
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Writer.java b/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Writer.java
index 38c754be..b5e4cdf2 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Writer.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Writer.java
@@ -26,18 +26,26 @@
 package org.janelia.saalfeldlab.n5;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.UncheckedIOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
 
-import com.google.gson.JsonSyntaxException;
 import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
+import org.janelia.saalfeldlab.n5.shard.InMemoryShard;
+import org.janelia.saalfeldlab.n5.shard.Shard;
+import org.janelia.saalfeldlab.n5.shard.ShardParameters;
 
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonNull;
 import com.google.gson.JsonObject;
+import com.google.gson.JsonSyntaxException;
+import org.janelia.saalfeldlab.n5.shard.VirtualShard;
+import org.janelia.saalfeldlab.n5.util.Position;
 
 /**
  * Default implementation of {@link N5Writer} with JSON attributes parsed with
@@ -74,7 +82,7 @@ default void createGroup(final String path) throws N5Exception {
 		try {
 			getKeyValueAccess().createDirectories(absoluteGroupPath(normalPath));
 		} catch (final IOException | UncheckedIOException e) {
-			throw new N5Exception.N5IOException("Failed to create group " + path, e);
+			throw new N5IOException("Failed to create group " + path, e);
 		}
 	}
 
@@ -98,7 +106,7 @@ default void writeAttributes(
 		try (final LockedChannel lock = getKeyValueAccess().lockForWriting(absoluteAttributesPath(normalGroupPath))) {
 			GsonUtils.writeAttributes(lock.newWriter(), attributes, getGson());
 		} catch (final IOException | UncheckedIOException e) {
-			throw new N5Exception.N5IOException("Failed to write attributes into " + normalGroupPath, e);
+			throw new N5IOException("Failed to write attributes into " + normalGroupPath, e);
 		}
 	}
 
@@ -193,7 +201,7 @@ default <T> T removeAttribute(final String pathName, final String key, final Cla
 			throw new N5Exception.N5ClassCastException(e);
 		}
 		if (obj != null) {
-			writeAttributes(normalPath, attributes);
+			setAttributes(normalPath, attributes);
 		}
 		return obj;
 	}
@@ -210,19 +218,65 @@ default boolean removeAttributes(final String pathName, final List<String> attri
 		return removed;
 	}
 
+	@Override default <T> void writeBlocks(
+			final String datasetPath,
+			final DatasetAttributes datasetAttributes,
+			final DataBlock<T>... dataBlocks) throws N5Exception {
+
+		if (datasetAttributes.getShardSize() != null) {
+
+			/* Group blocks by shard index */
+			final Map<Position, List<DataBlock<T>>> shardBlockMap = datasetAttributes.groupBlocks(
+					Arrays.stream(dataBlocks).collect(Collectors.toList()));
+
+			for (final Entry<Position, List<DataBlock<T>>> e : shardBlockMap.entrySet()) {
+
+				final long[] shardPosition = e.getKey().get();
+				final Shard<T> currentShard = readShard(datasetPath, datasetAttributes,
+						shardPosition);
+
+				final InMemoryShard<T> newShard = InMemoryShard.fromShard(currentShard);
+				for (final DataBlock<T> blk : e.getValue())
+					newShard.addBlock(blk);
+
+				writeShard(datasetPath, datasetAttributes, newShard);
+			}
+
+		} else {
+			GsonN5Writer.super.writeBlocks(datasetPath, datasetAttributes, dataBlocks);
+		}
+	}
+
 	@Override
 	default <T> void writeBlock(
 			final String path,
 			final DatasetAttributes datasetAttributes,
 			final DataBlock<T> dataBlock) throws N5Exception {
 
-		final String blockPath = absoluteDataBlockPath(N5URI.normalizeGroupPath(path), dataBlock.getGridPosition());
-		try (final LockedChannel lock = getKeyValueAccess().lockForWriting(blockPath)) {
-			DefaultBlockWriter.writeBlock(lock.newOutputStream(), datasetAttributes, dataBlock);
+		final long[] keyPos = datasetAttributes.getArrayCodec().getPositionForBlock(datasetAttributes, dataBlock);
+		final String keyPath = absoluteDataBlockPath(N5URI.normalizeGroupPath(path), keyPos);
+
+		datasetAttributes.getArrayCodec().writeBlock(
+				getKeyValueAccess(),
+				keyPath,
+				datasetAttributes,
+				dataBlock);
+	}
+
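+	/**
+	 * Writes a shard under its key: the shard is first materialized as an
+	 * {@link InMemoryShard}, then serialized to the output stream in one pass.
+	 */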
+	@Override
+	default <T> void writeShard(
+			final String path,
+			final DatasetAttributes datasetAttributes,
+			final Shard<T> shard) throws N5Exception {
+
+		final String shardPath = absoluteDataBlockPath(N5URI.normalizeGroupPath(path), shard.getGridPosition());
+		try (final LockedChannel lock = getKeyValueAccess().lockForWriting(shardPath)) {
+			try (final OutputStream shardOut = lock.newOutputStream()) {
+				InMemoryShard.fromShard(shard).write(shardOut);
+			}
 		} catch (final IOException | UncheckedIOException e) {
 			throw new N5IOException(
-					"Failed to write block " + Arrays.toString(dataBlock.getGridPosition()) + " into dataset " + path,
-					e);
+					"Failed to write shard " + Arrays.toString(shard.getGridPosition()) + " into dataset " + path, e);
 		}
 	}
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/GsonN5Reader.java b/src/main/java/org/janelia/saalfeldlab/n5/GsonN5Reader.java
index be16ed08..0ba185e3 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/GsonN5Reader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/GsonN5Reader.java
@@ -28,6 +28,10 @@
 import java.lang.reflect.Type;
 import java.util.Map;
 
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonParseException;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonSyntaxException;
@@ -40,6 +44,8 @@ public interface GsonN5Reader extends N5Reader {
 
 	Gson getGson();
 
+	/**
+	 * Returns the key (i.e. the file name) under which JSON attributes are stored,
+	 * e.g. "attributes.json".
+	 */
+	String getAttributesKey();
+
 	@Override
 	default Map<String, Class<?>> listAttributes(final String pathName) throws N5Exception {
 
@@ -56,30 +62,15 @@ default DatasetAttributes getDatasetAttributes(final String pathName) throws N5E
 
 	default DatasetAttributes createDatasetAttributes(final JsonElement attributes) {
 
-		try {
-			final long[] dimensions = GsonUtils.readAttribute(attributes, DatasetAttributes.DIMENSIONS_KEY, long[].class, getGson());
-			if (dimensions == null) {
-				return null;
-			}
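+		// adapt this reader's Gson instance into a deserialization context so the
+		// shared DatasetAttributesAdapter can be reused here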
+		final JsonDeserializationContext context = new JsonDeserializationContext() {
 
-			final DataType dataType = GsonUtils.readAttribute(attributes, DatasetAttributes.DATA_TYPE_KEY, DataType.class, getGson());
-			if (dataType == null) {
-				return null;
-			}
+			@Override public <T> T deserialize(JsonElement json, Type typeOfT) throws JsonParseException {
 
-			final int[] blockSize = GsonUtils.readAttribute(attributes, DatasetAttributes.BLOCK_SIZE_KEY, int[].class, getGson());
-			final Compression compression = GsonUtils.readAttribute(attributes, DatasetAttributes.COMPRESSION_KEY, Compression.class, getGson());
-
-			/* version 0 */
-			final String compressionVersion0Name = compression == null
-					? GsonUtils.readAttribute(attributes, DatasetAttributes.compressionTypeKey, String.class, getGson())
-					: null;
+				return getGson().fromJson(json, typeOfT);
+			}
+		};
 
-			return DatasetAttributes.from(dimensions, dataType, blockSize, compression, compressionVersion0Name);
-		} catch (JsonSyntaxException | NumberFormatException | ClassCastException e) {
-			/* We cannot create a dataset, so return null. */
-			return null;
-		}
+		return DatasetAttributes.getJsonAdapter().deserialize(attributes, DatasetAttributes.class, context);
 	}
 
 	@Override
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/GsonUtils.java b/src/main/java/org/janelia/saalfeldlab/n5/GsonUtils.java
index f8a5677e..03741476 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/GsonUtils.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/GsonUtils.java
@@ -30,10 +30,14 @@
 import java.io.Writer;
 import java.lang.reflect.Array;
 import java.lang.reflect.Type;
+import java.nio.ByteOrder;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 
+import org.janelia.saalfeldlab.n5.codec.RawBytes;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonArray;
@@ -42,6 +46,7 @@
 import com.google.gson.JsonPrimitive;
 import com.google.gson.JsonSyntaxException;
 import com.google.gson.reflect.TypeToken;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec;
 
 /**
  * Utility class for working with  JSON.
@@ -54,6 +59,10 @@ static Gson registerGson(final GsonBuilder gsonBuilder) {
 
 		gsonBuilder.registerTypeAdapter(DataType.class, new DataType.JsonAdapter());
 		gsonBuilder.registerTypeHierarchyAdapter(Compression.class, CompressionAdapter.getJsonAdapter());
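+		// adapters for the codec-based format: dataset attributes, named codecs,
+		// byte order, and the shard index location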
+		gsonBuilder.registerTypeHierarchyAdapter(DatasetAttributes.class, DatasetAttributes.getJsonAdapter());
+		gsonBuilder.registerTypeHierarchyAdapter(Codec.class, NameConfigAdapter.getJsonAdapter(Codec.class));
+		gsonBuilder.registerTypeHierarchyAdapter(ByteOrder.class, RawBytes.byteOrderAdapter);
+		gsonBuilder.registerTypeHierarchyAdapter(ShardingCodec.IndexLocation.class, ShardingCodec.indexLocationAdapter);
 		gsonBuilder.disableHtmlEscaping();
 		return gsonBuilder.create();
 	}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/GzipCompression.java b/src/main/java/org/janelia/saalfeldlab/n5/GzipCompression.java
index b691a6d3..b03a4d93 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/GzipCompression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/GzipCompression.java
@@ -37,16 +37,24 @@
 import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
 import org.apache.commons.compress.compressors.gzip.GzipParameters;
 import org.janelia.saalfeldlab.n5.Compression.CompressionType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
 
 @CompressionType("gzip")
+@NameConfig.Name("gzip")
 public class GzipCompression implements DefaultBlockReader, DefaultBlockWriter, Compression {
 
 	private static final long serialVersionUID = 8630847239813334263L;
 
 	@CompressionParameter
+	@NameConfig.Parameter
+	//TODO Caleb: How should serialization handle the parameter-less constructor?
+	// For N5 the default is -1.
+	// For zarr the range is 0-9 and is required.
+	// How to map -1 to some default (1?) when serializing to zarr?
 	private final int level;
 
 	@CompressionParameter
+	@NameConfig.Parameter(optional = true)
 	private final boolean useZlib;
 
 	private final transient GzipParameters parameters = new GzipParameters();
@@ -68,7 +76,7 @@ public GzipCompression(final int level, final boolean useZlib) {
 	}
 
 	@Override
-	public InputStream getInputStream(final InputStream in) throws IOException {
+	public InputStream decode(InputStream in) throws IOException {
 
 		if (useZlib) {
 			return new InflaterInputStream(in);
@@ -78,7 +86,13 @@ public InputStream getInputStream(final InputStream in) throws IOException {
 	}
 
 	@Override
-	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+	public InputStream getInputStream(final InputStream in) throws IOException {
+
+		return decode(in);
+	}
+
+	@Override
+	public OutputStream encode(OutputStream out) throws IOException {
 
 		if (useZlib) {
 			return new DeflaterOutputStream(out, new Deflater(level));
@@ -88,6 +102,12 @@ public OutputStream getOutputStream(final OutputStream out) throws IOException {
 		}
 	}
 
+	@Override
+	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+
+		return encode(out);
+	}
+
 	@Override
 	public GzipCompression getReader() {
 
@@ -116,4 +136,5 @@ public boolean equals(final Object other) {
 			return useZlib == gz.useZlib && level == gz.level;
 		}
 	}
+
 }
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/IntArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/IntArrayDataBlock.java
index 98c5577d..4d338332 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/IntArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/IntArrayDataBlock.java
@@ -25,10 +25,13 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
-public class IntArrayDataBlock extends AbstractDataBlock<int[]> {
 
+public class IntArrayDataBlock extends AbstractDataBlock<int[]> {
 	public IntArrayDataBlock(final int[] size, final long[] gridPosition, final int[] data) {
 
 		super(size, gridPosition, data);
@@ -48,6 +51,20 @@ public void readData(final ByteBuffer buffer) {
 		buffer.asIntBuffer().get(data);
 	}
 
+	@Override
+	public void readData(final DataInput input) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			data[i] = input.readInt();
+	}
+
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			output.writeInt(data[i]);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/KeyValueAccess.java b/src/main/java/org/janelia/saalfeldlab/n5/KeyValueAccess.java
index ea09269b..138bb73c 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/KeyValueAccess.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/KeyValueAccess.java
@@ -117,6 +117,8 @@ public default String compose(final URI uri, final String... components) {
 	 */
 	public boolean exists(final String normalPath);
 
+	/**
+	 * Get the size, in bytes, of the object at the given path.
+	 *
+	 * @param normalPath normalized path
+	 * @return the size in bytes
+	 * @throws IOException if an error occurs
+	 */
+	public long size(final String normalPath) throws IOException;
+
 	/**
 	 * Test whether the path is a directory.
 	 *
@@ -155,6 +157,9 @@ public default String compose(final URI uri, final String... components) {
 	 */
 	public LockedChannel lockForReading(final String normalPath) throws IOException;
 
+	/**
+	 * Create a lock on a byte range of a path for reading.
+	 *
+	 * @param normalPath normalized path
+	 * @param startByte offset of the first byte of the range
+	 * @param size length of the range in bytes
+	 * @return the locked channel
+	 * @throws IOException if an error occurs
+	 */
+	public LockedChannel lockForReading(String normalPath, final long startByte, final long size)
+			throws IOException;
+
 	/**
 	 * Create an exclusive lock on a path for writing. If the file doesn't
 	 * exist yet, it will be created, including all directories leading up to
@@ -175,6 +180,9 @@ public default String compose(final URI uri, final String... components) {
 	 */
 	public LockedChannel lockForWriting(final String normalPath) throws IOException;
 
+	/**
+	 * Create an exclusive lock on a byte range of a path for writing.
+	 *
+	 * @param normalPath normalized path
+	 * @param startByte offset of the first byte of the range
+	 * @param size length of the range in bytes
+	 * @return the locked channel
+	 * @throws IOException if an error occurs
+	 */
+	public LockedChannel lockForWriting(String normalPath, final long startByte, final long size)
+			throws IOException;
+
 	/**
 	 * List all 'directory'-like children of a path.
 	 *
@@ -222,4 +230,5 @@ public default String compose(final URI uri, final String... components) {
 	 *            if an error occurs during deletion
 	 */
 	public void delete(final String normalPath) throws IOException;
+
 }
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/LockedChannel.java b/src/main/java/org/janelia/saalfeldlab/n5/LockedChannel.java
index bd34a59d..c3c53f50 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/LockedChannel.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/LockedChannel.java
@@ -40,6 +40,8 @@
  */
 public interface LockedChannel extends Closeable {
 
+	/**
+	 * Get the size, in bytes, of this channel's content.
+	 *
+	 * @return the size in bytes
+	 * @throws IOException if an error occurs
+	 */
+	public long size() throws IOException;
+
 	/**
 	 * Create a UTF-8 {@link Reader}.
 	 *
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/LongArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/LongArrayDataBlock.java
index d3f3fc9c..be435c4f 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/LongArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/LongArrayDataBlock.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 public class LongArrayDataBlock extends AbstractDataBlock<long[]> {
@@ -48,6 +51,20 @@ public void readData(final ByteBuffer buffer) {
 		buffer.asLongBuffer().get(data);
 	}
 
+	@Override
+	public void readData(final DataInput inputStream) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			data[i] = inputStream.readLong();
+	}
+
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			output.writeLong(data[i]);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/Lz4Compression.java b/src/main/java/org/janelia/saalfeldlab/n5/Lz4Compression.java
index d76e4fe5..654ca4b5 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/Lz4Compression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/Lz4Compression.java
@@ -33,8 +33,10 @@
 
 import net.jpountz.lz4.LZ4BlockInputStream;
 import net.jpountz.lz4.LZ4BlockOutputStream;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
 
 @CompressionType("lz4")
+@NameConfig.Name("lz4")
 public class Lz4Compression implements DefaultBlockReader, DefaultBlockWriter, Compression {
 
 	private static final long serialVersionUID = -9071316415067427256L;
@@ -53,17 +55,29 @@ public Lz4Compression() {
 	}
 
 	@Override
-	public InputStream getInputStream(final InputStream in) throws IOException {
+	public InputStream decode(final InputStream in) throws IOException {
 
 		return new LZ4BlockInputStream(in);
 	}
 
 	@Override
-	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+	public InputStream getInputStream(final InputStream in) throws IOException {
+
+		return decode(in);
+	}
+
+	@Override
+	public OutputStream encode(final OutputStream out) throws IOException {
 
 		return new LZ4BlockOutputStream(out, blockSize);
 	}
 
+	@Override
+	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+
+		return encode(out);
+	}
+
 	@Override
 	public Lz4Compression getReader() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/N5Exception.java b/src/main/java/org/janelia/saalfeldlab/n5/N5Exception.java
index 345a7cd0..7fbe0135 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/N5Exception.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/N5Exception.java
@@ -121,4 +121,31 @@ protected N5NoSuchKeyException(
 			super(message, cause, enableSuppression, writableStackTrace);
 		}
 	}
+
+	public static class N5ShardException extends N5IOException {
+
+		public N5ShardException(final String message) {
+
+			super(message);
+		}
+
+		public N5ShardException(final String message, final Throwable cause) {
+
+			super(message, cause);
+		}
+
+		public N5ShardException(final Throwable cause) {
+
+			super(cause);
+		}
+
+		protected N5ShardException(
+				final String message,
+				final Throwable cause,
+				final boolean enableSuppression,
+				final boolean writableStackTrace) {
+
+			super(message, cause, enableSuppression, writableStackTrace);
+		}
+	}
 }
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/N5KeyValueReader.java b/src/main/java/org/janelia/saalfeldlab/n5/N5KeyValueReader.java
index 29e43ce6..dd4b9cc2 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/N5KeyValueReader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/N5KeyValueReader.java
@@ -123,7 +123,11 @@ protected N5KeyValueReader(
 		this.keyValueAccess = keyValueAccess;
 		this.gson = GsonUtils.registerGson(gsonBuilder);
 		this.cacheMeta = cacheMeta;
-		this.cache = newCache();
+
+		if (this.cacheMeta)
+			this.cache = newCache();
+		else
+			this.cache = null;
 
 		try {
 			uri = keyValueAccess.uri(basePath);
@@ -144,6 +148,12 @@ protected N5KeyValueReader(
 			throw new N5Exception.N5IOException("No container exists at " + basePath);
 	}
 
+	@Override
+	public String getAttributesKey() {
+
+		return ATTRIBUTES_JSON;
+	}
+
 	@Override
 	public Gson getGson() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/N5Reader.java b/src/main/java/org/janelia/saalfeldlab/n5/N5Reader.java
index 010e757f..20a04b33 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/N5Reader.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/N5Reader.java
@@ -25,6 +25,8 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import org.janelia.saalfeldlab.n5.shard.Shard;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -52,7 +54,7 @@
  */
 public interface N5Reader extends AutoCloseable {
 
-	public static class Version {
+	class Version {
 
 		private final int major;
 		private final int minor;
@@ -189,12 +191,12 @@ public boolean isCompatible(final Version version) {
 	/**
 	 * SemVer version of this N5 spec.
 	 */
-	public static final Version VERSION = new Version(4, 0, 0);
+	Version VERSION = new Version(4, 0, 0);
 
 	/**
 	 * Version attribute key.
 	 */
-	public static final String VERSION_KEY = "n5";
+	String VERSION_KEY = "n5";
 
 	/**
 	 * Get the SemVer version of this container as specified in the 'version'
@@ -220,7 +222,7 @@ default Version getVersion() throws N5Exception {
 	 * @return the base path URI
 	 */
 	// TODO: should this throw URISyntaxException or can we assume that this is
-	// never possible if we were able to instantiate this N5Reader?
+	//   never possible if we were able to instantiate this N5Reader?
 	URI getURI();
 
 	/**
@@ -288,11 +290,50 @@ <T> T getAttribute(
 	 * @throws N5Exception
 	 *             the exception
 	 */
-	DataBlock<?> readBlock(
+	<T> DataBlock<T> readBlock(
 			final String pathName,
 			final DatasetAttributes datasetAttributes,
 			final long... gridPosition) throws N5Exception;
 
+	/**
+	 * Reads the {@link Shard} at the corresponding grid position.
+	 *
+	 * @param <T> the data access type for the blocks in the shard
+	 * @param datasetPath the dataset path to read the shard from
+	 * @param datasetAttributes the dataset attributes
+	 * @param shardGridPosition the grid position of the shard to read
+	 * @return the shard
+	 */
+	<T> Shard<T> readShard(final String datasetPath, final DatasetAttributes datasetAttributes, long... shardGridPosition);
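+
+	// A usage sketch; the accessor shown on Shard is an assumption for
+	// illustration, not confirmed by this interface:
+	//
+	//   Shard<short[]> shard = n5.readShard("/data", attributes, 0, 0);
+	//   for (DataBlock<short[]> block : shard.getBlocks()) { /* ... */ }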
+
+	/**
+	 * Reads multiple {@link DataBlock}s.
+	 * <p>
+	 * Implementations may optimize / batch read operations when possible, e.g.
+	 * in the case that the datasets are sharded.
+	 *
+	 * @param pathName
+	 *            dataset path
+	 * @param datasetAttributes
+	 *            the dataset attributes
+	 * @param gridPositions
+	 *            a list of grid positions
+	 * @return a list of data blocks
+	 * @throws N5Exception
+	 *             the exception
+	 */
+	default <T> List<DataBlock<T>> readBlocks(
+			final String pathName,
+			final DatasetAttributes datasetAttributes,
+			final List<long[]> gridPositions) throws N5Exception {
+
+		final ArrayList<DataBlock<T>> blocks = new ArrayList<>();
+		for( final long[] p : gridPositions )
+			blocks.add(readBlock(pathName, datasetAttributes, p));
+
+		return blocks;
+	}
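+
+	// Usage sketch: batch-read two blocks; sharded implementations may service
+	// this with fewer key-value reads than one per block.
+	//
+	//   final List<long[]> positions = Arrays.asList(new long[]{0, 0}, new long[]{1, 0});
+	//   final List<DataBlock<short[]>> blocks = n5.readBlocks("/data", attributes, positions);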
+
 	/**
 	 * Load a {@link DataBlock} as a {@link Serializable}. The offset is given
 	 * in
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/N5Writer.java b/src/main/java/org/janelia/saalfeldlab/n5/N5Writer.java
index 4cfd52be..01606204 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/N5Writer.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/N5Writer.java
@@ -30,10 +30,17 @@
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.io.UncheckedIOException;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.N5BlockCodec;
+import org.janelia.saalfeldlab.n5.shard.Shard;
+import org.janelia.saalfeldlab.n5.shard.ShardParameters;
+
 /**
  * A simple structured container API for hierarchies of chunked
  * n-dimensional datasets and attributes.
@@ -135,7 +142,7 @@ default void setDatasetAttributes(
 			final String datasetPath,
 			final DatasetAttributes datasetAttributes) throws N5Exception {
 
-		setAttributes(datasetPath, datasetAttributes.asMap());
+		setAttribute(datasetPath, "/", datasetAttributes);
 	}
 
 	/**
@@ -206,10 +213,34 @@ default void createDataset(
 		setDatasetAttributes(normalPath, datasetAttributes);
 	}
 
+	/**
+	 * Creates a dataset. This does not create any data but the path and
+	 * mandatory attributes only.
+	 *
+	 * @param datasetPath dataset path
+	 * @param dimensions the dataset dimensions
+	 * @param blockSize the block size
+	 * @param dataType the data type
+	 * @param codecs codecs to encode/decode with
+	 * @throws N5Exception the exception
+	 */
+	default void createDataset(
+			final String datasetPath,
+			final long[] dimensions,
+			final int[] blockSize,
+			final DataType dataType,
+			final Codec... codecs) throws N5Exception {
+
+		createDataset(datasetPath, new DatasetAttributes(dimensions, blockSize, dataType, codecs));
+	}
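+
+	// A usage sketch (codec choices are illustrative, assuming Compression
+	// implements Codec as elsewhere in this changeset):
+	//
+	//   n5.createDataset("/data", new long[]{512, 512}, new int[]{64, 64},
+	//   		DataType.UINT16, new N5BlockCodec(), new GzipCompression());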
+
 	/**
+	 * DEPRECATED. {@link Compression}s are {@link Codec}s.
+	 * Use {@link #createDataset(String, long[], int[], DataType, Codec...)} instead.
+	 * <p>
 	 * Creates a dataset. This does not create any data but the path and
-	 * mandatory
-	 * attributes only.
+	 * mandatory attributes only.
 	 *
 	 * @param datasetPath dataset path
 	 * @param dimensions the dataset dimensions
@@ -218,6 +249,7 @@ default void createDataset(
 	 * @param compression the compression
 	 * @throws N5Exception the exception
 	 */
+	@Deprecated
 	default void createDataset(
 			final String datasetPath,
 			final long[] dimensions,
@@ -225,7 +257,7 @@ default void createDataset(
 			final DataType dataType,
 			final Compression compression) throws N5Exception {
 
-		createDataset(datasetPath, new DatasetAttributes(dimensions, blockSize, dataType, compression));
+		createDataset(datasetPath, dimensions, blockSize, dataType, new N5BlockCodec(), compression);
 	}
 
 	/**
@@ -242,6 +274,39 @@ <T> void writeBlock(
 			final DatasetAttributes datasetAttributes,
 			final DataBlock<T> dataBlock) throws N5Exception;
 
+	/**
+	 * Write multiple data blocks, useful for request aggregation.
+	 *
+	 * @param datasetPath dataset path
+	 * @param datasetAttributes the dataset attributes
+	 * @param dataBlocks the data blocks
+	 * @param <T> the data block data type
+	 * @throws N5Exception the exception
+	 */
+	default <T> void writeBlocks(
+			final String datasetPath,
+			final DatasetAttributes datasetAttributes,
+			final DataBlock<T>... dataBlocks) throws N5Exception {
+
+		// default method is naive
+		for (DataBlock<T> block : dataBlocks)
+			writeBlock(datasetPath, datasetAttributes, block);
+	}
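+
+	// Usage sketch: writing several blocks at once lets implementations
+	// aggregate them, e.g. into a single shard write.
+	//
+	//   n5.writeBlocks("/data", attributes, blockA, blockB);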
+
+	/**
+	 * Writes a {@link Shard}.
+	 *
+	 * @param datasetPath dataset path
+	 * @param datasetAttributes the dataset attributes
+	 * @param shard the shard
+	 * @param <T> the data block data type
+	 * @throws N5Exception the exception
+	 */
+	<T> void writeShard(
+			final String datasetPath,
+			final DatasetAttributes datasetAttributes,
+			final Shard<T> shard) throws N5Exception;
+
 	/**
 	 * Deletes the block at {@code gridPosition}
 	 *
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/NameConfigAdapter.java b/src/main/java/org/janelia/saalfeldlab/n5/NameConfigAdapter.java
new file mode 100644
index 00000000..30e45d80
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/NameConfigAdapter.java
@@ -0,0 +1,246 @@
+/**
+ * Copyright (c) 2017, Stephan Saalfeld
+ * All rights reserved.
+ * <p>
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ * <p>
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * <p>
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.janelia.saalfeldlab.n5;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+import org.janelia.saalfeldlab.n5.serialization.N5Annotations;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+import org.scijava.annotations.Index;
+import org.scijava.annotations.IndexItem;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map.Entry;
+
+/**
+ * Gson adapter for {@link NameConfig}-annotated types; auto-discovers annotated
+ * implementations on the classpath.
+ *
+ * @author Caleb Hulbert
+ */
+public class NameConfigAdapter<T> implements JsonDeserializer<T>, JsonSerializer<T> {
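+
+	// Serialized form sketch: for an implementation annotated with
+	// @NameConfig.Name("gzip") and an int parameter "level", this adapter reads
+	// and writes JSON of the form
+	//
+	//   { "name": "gzip", "configuration": { "level": 5 } }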
+
+	private static HashMap<Class<?>, NameConfigAdapter<?>> adapters = new HashMap<>();
+
+	private static <V> void registerAdapter(Class<V> cls) {
+
+		adapters.put(cls, new NameConfigAdapter<>(cls));
+		update(adapters.get(cls));
+	}
+	private final HashMap<String, Constructor<? extends T>> constructors = new HashMap<>();
+
+	private final HashMap<String, HashMap<String, Field>> parameters = new HashMap<>();
+	private final HashMap<String, HashMap<String, String>> parameterNames = new HashMap<>();
+	private static ArrayList<Field> getDeclaredFields(Class<?> clazz) {
+
+		final ArrayList<Field> fields = new ArrayList<>();
+		fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
+		for (clazz = clazz.getSuperclass(); clazz != null; clazz = clazz.getSuperclass())
+			fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
+		return fields;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static synchronized <T> void update(final NameConfigAdapter<T> adapter) {
+
+		final String prefix = adapter.type.getAnnotation(NameConfig.Prefix.class).value();
+		final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+		final Index<NameConfig.Name> annotationIndex = Index.load(NameConfig.Name.class, classLoader);
+		for (final IndexItem<NameConfig.Name> item : annotationIndex) {
+			Class<T> clazz;
+			try {
+				clazz = (Class<T>)Class.forName(item.className());
+				final String name = clazz.getAnnotation(NameConfig.Name.class).value();
+				final String type = prefix + "." + name;
+
+				final Constructor<T> constructor = clazz.getDeclaredConstructor();
+
+				final HashMap<String, Field> parameters = new HashMap<>();
+				final HashMap<String, String> parameterNames = new HashMap<>();
+				final ArrayList<Field> fields = getDeclaredFields(clazz);
+				for (final Field field : fields) {
+					final NameConfig.Parameter parameter = field.getAnnotation(NameConfig.Parameter.class);
+					if (parameter != null) {
+
+						final String parameterName;
+						if (parameter.value().equals(""))
+							parameterName = field.getName();
+						else
+							parameterName = parameter.value();
+
+						parameterNames.put(field.getName(), parameterName);
+
+						parameters.put(field.getName(), field);
+					}
+				}
+
+				adapter.constructors.put(type, constructor);
+				adapter.parameters.put(type, parameters);
+				adapter.parameterNames.put(type, parameterNames);
+			} catch (final ClassNotFoundException | NoSuchMethodException | ClassCastException
+						   | UnsatisfiedLinkError e) {
+				System.err.println(adapter.type.getSimpleName() + " '" + item.className() + "' could not be registered");
+			}
+		}
+	}
+
+	private final Class<T> type;
+
+	public NameConfigAdapter(Class<T> cls) {
+		this.type = cls;
+	}
+
+	@Override
+	public JsonElement serialize(
+			final T object,
+			final Type typeOfSrc,
+			final JsonSerializationContext context) {
+
+		final Class<T> clazz = (Class<T>)object.getClass();
+
+		final String name = clazz.getAnnotation(NameConfig.Name.class).value();
+		final String prefix = type.getAnnotation(NameConfig.Prefix.class).value();
+		final String type = prefix + "." + name;
+
+		final JsonObject json = new JsonObject();
+		json.addProperty("name", name);
+		final JsonObject configuration = new JsonObject();
+
+		final HashMap<String, Field> parameterTypes = parameters.get(type);
+		final HashMap<String, String> parameterNameMap = parameterNames.get(type);
+		try {
+			for (final Entry<String, Field> parameterType : parameterTypes.entrySet()) {
+				final String fieldName = parameterType.getKey();
+				final Field field = parameterType.getValue();
+				final boolean isAccessible = field.isAccessible();
+				field.setAccessible(true);
+				final Object value = field.get(object);
+				field.setAccessible(isAccessible);
+				final JsonElement serialized = context.serialize(value);
+				if (field.getAnnotation(N5Annotations.ReverseArray.class) != null) {
+					final JsonArray reversedArray = reverseJsonArray(serialized.getAsJsonArray());
+					configuration.add(parameterNameMap.get(fieldName), reversedArray);
+				} else
+					configuration.add(parameterNameMap.get(fieldName), serialized);
+
+			}
+			if (!configuration.isEmpty())
+				json.add("configuration", configuration);
+		} catch (SecurityException | IllegalArgumentException | IllegalAccessException e) {
+			e.printStackTrace(System.err);
+			return null;
+		}
+
+		return json;
+	}
+
+	@Override
+	public T deserialize(
+			final JsonElement json,
+			final Type typeOfT,
+			final JsonDeserializationContext context) throws JsonParseException {
+
+		final String prefix = type.getAnnotation(NameConfig.Prefix.class).value();
+
+		final JsonObject objectJson = json.getAsJsonObject();
+		final JsonElement nameElement = objectJson.get("name");
+		if (nameElement == null) {
+			return null;
+		}
+		final String name = nameElement.getAsString();
+
+		final String type = prefix + "." + name;
+
+		final JsonObject configuration = objectJson.getAsJsonObject("configuration");
+		/* A missing configuration is acceptable only if all parameters are
+		 * optional; otherwise we cannot construct the object. */
+		if (configuration == null) {
+			for (final Field field : parameters.get(type).values()) {
+				if (!field.getAnnotation(NameConfig.Parameter.class).optional())
+					return null;
+			}
+		}
+
+		final Constructor<? extends T> constructor = constructors.get(type);
+		constructor.setAccessible(true);
+		final T object;
+		try {
+			object = constructor.newInstance();
+			final HashMap<String, Field> parameterTypes = parameters.get(type);
+			final HashMap<String, String> parameterNameMap = parameterNames.get(type);
+			for (final Entry<String, Field> parameterType : parameterTypes.entrySet()) {
+				final String fieldName = parameterType.getKey();
+				final String paramName = parameterNameMap.get(fieldName);
+				final JsonElement paramJson = configuration == null ? null : configuration.get(paramName);
+				final Field field = parameterType.getValue();
+				if (paramJson != null) {
+					final Object parameter;
+					if (field.getAnnotation(N5Annotations.ReverseArray.class) != null) {
+						final JsonArray reversedArray = reverseJsonArray(paramJson);
+						parameter = context.deserialize(reversedArray, field.getType());
+					} else
+						parameter = context.deserialize(paramJson, field.getType());
+					ReflectionUtils.setFieldValue(object, fieldName, parameter);
+				} else if (!field.getAnnotation(NameConfig.Parameter.class).optional()) {
+					/* if param is null, and not optional, return null */
+					return null;
+				}
+			}
+		} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException
+				 | SecurityException | NoSuchFieldException e) {
+			e.printStackTrace(System.err);
+			return null;
+		}
+
+		return object;
+	}
+
+	private static JsonArray reverseJsonArray(JsonElement paramJson) {
+
+		final JsonArray reversedJson = new JsonArray(paramJson.getAsJsonArray().size());
+		for (int i = paramJson.getAsJsonArray().size() - 1; i >= 0; i--) {
+			reversedJson.add(paramJson.getAsJsonArray().get(i));
+		}
+		return reversedJson;
+	}
+
+	public static <T> NameConfigAdapter<T> getJsonAdapter(Class<T> cls) {
+
+		if (adapters.get(cls) == null)
+			registerAdapter(cls);
+		return (NameConfigAdapter<T>) adapters.get(cls);
+	}
+}
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/RawCompression.java b/src/main/java/org/janelia/saalfeldlab/n5/RawCompression.java
index ffa674fc..ebd58b38 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/RawCompression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/RawCompression.java
@@ -30,6 +30,7 @@
 import java.io.OutputStream;
 
 import org.janelia.saalfeldlab.n5.Compression.CompressionType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
 
 @CompressionType("raw")
 public class RawCompression implements DefaultBlockReader, DefaultBlockWriter, Compression {
@@ -37,17 +38,30 @@ public class RawCompression implements DefaultBlockReader, DefaultBlockWriter, C
 	private static final long serialVersionUID = 7526445806847086477L;
 
 	@Override
-	public InputStream getInputStream(final InputStream in) throws IOException {
+	public InputStream decode(final InputStream in) throws IOException {
 
 		return in;
 	}
 
 	@Override
-	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+	public InputStream getInputStream(final InputStream in) throws IOException {
+
+		return decode(in);
+	}
+
+
+	@Override
+	public OutputStream encode(final OutputStream out) throws IOException {
 
 		return out;
 	}
 
+	@Override
+	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+
+		return encode(out);
+	}
+
 	@Override
 	public RawCompression getReader() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/ShardedDatasetAttributes.java b/src/main/java/org/janelia/saalfeldlab/n5/ShardedDatasetAttributes.java
new file mode 100644
index 00000000..335c252e
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/ShardedDatasetAttributes.java
@@ -0,0 +1,124 @@
+package org.janelia.saalfeldlab.n5;
+
+import java.util.Arrays;
+
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.Codec.ArrayCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec.BytesCodec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardParameters;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+
+@Deprecated
+public class ShardedDatasetAttributes extends DatasetAttributes implements ShardParameters {
+
+	private static final long serialVersionUID = -4559068841006651814L;
+
+	private final int[] shardSize;
+
+	private final ShardingCodec shardingCodec;
+
+	public ShardedDatasetAttributes (
+			final long[] dimensions,
+			final int[] shardSize, //in pixels
+			final int[] blockSize, //in pixels
+			final DataType dataType,
+			final Codec[] blocksCodecs,
+			final DeterministicSizeCodec[] indexCodecs,
+			final IndexLocation indexLocation
+	) {
+		//TODO Caleb: Can we just let the super codecs() return this ShardCodec?
+		super(dimensions, blockSize, dataType, blocksCodecs);
+
+		if (!validateShardBlockSize(shardSize, blockSize)) {
+			throw new N5Exception(String.format("Invalid shard %s / block size %s",
+					Arrays.toString(shardSize),
+					Arrays.toString(blockSize)));
+		}
+
+		this.shardSize = shardSize;
+		this.shardingCodec = new ShardingCodec(
+				blockSize,
+				blocksCodecs,
+				indexCodecs,
+				indexLocation
+		);
+	}
+
+	public ShardedDatasetAttributes(
+			final long[] dimensions,
+			final int[] shardSize, //in pixels
+			final int[] blockSize, //in pixels
+			final DataType dataType,
+			final ShardingCodec codec) {
+		super(dimensions, blockSize, dataType, null, null);
+		this.shardSize = shardSize;
+		this.shardingCodec = codec;
+	}
+
+	/**
+	 * Returns whether the given shard and block sizes are valid. Specifically, is
+	 * the shard size a multiple of the block size in every dimension.
+	 *
+	 * @param shardSize size of the shard in pixels
+	 * @param blockSize size of a block in pixels
+	 * @return true if the shard size is a multiple of the block size in every dimension
+	 */
+	public static boolean validateShardBlockSize(final int[] shardSize, final int[] blockSize) {
+
+		if (shardSize.length != blockSize.length)
+			return false;
+
+		for (int i = 0; i < shardSize.length; i++) {
+			if (shardSize[i] % blockSize[i] != 0)
+				return false;
+		}
+		return true;
+	}
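+
+	// e.g. shardSize {64, 64} with blockSize {32, 32} is valid (each shard holds
+	// a 2x2 grid of blocks), while shardSize {64, 48} with blockSize {32, 32} is not.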
+
+	public ShardingCodec getShardingCodec() {
+		return shardingCodec;
+	}
+
+	@Override public ArrayCodec getArrayCodec() {
+
+		return shardingCodec.getArrayCodec();
+	}
+
+	@Override public BytesCodec[] getCodecs() {
+
+		return shardingCodec.getCodecs();
+	}
+
+	@Override
+	protected Codec[] concatenateCodecs() {
+
+		return new Codec[] { shardingCodec };
+	}
+
+	public IndexLocation getIndexLocation() {
+
+		return getShardingCodec().getIndexLocation();
+	}
+
+	/**
+	 * The size of a shard in pixel units.
+	 *
+	 * @return the number of pixels per dimension for a shard
+	 */
+	@Override
+	public int[] getShardSize() {
+
+		return shardSize;
+	}
+
+	public static int[] getBlockSize(Codec[] codecs) {
+
+		for (final Codec codec : codecs)
+			if (codec instanceof ShardingCodec)
+				return ((ShardingCodec)codec).getBlockSize();
+
+		return null;
+	}
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/ShortArrayDataBlock.java b/src/main/java/org/janelia/saalfeldlab/n5/ShortArrayDataBlock.java
index 2dbf6b17..c7d141f3 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/ShortArrayDataBlock.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/ShortArrayDataBlock.java
@@ -25,6 +25,9 @@
  */
 package org.janelia.saalfeldlab.n5;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 
 public class ShortArrayDataBlock extends AbstractDataBlock<short[]> {
@@ -48,6 +51,20 @@ public void readData(final ByteBuffer buffer) {
 		buffer.asShortBuffer().get(data);
 	}
 
+	@Override
+	public void readData(final DataInput dataInput) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			data[i] = dataInput.readShort();
+	}
+
+	@Override
+	public void writeData(final DataOutput output) throws IOException {
+
+		for (int i = 0; i < data.length; i++)
+			output.writeShort(data[i]);
+	}
+
 	@Override
 	public int getNumElements() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/XzCompression.java b/src/main/java/org/janelia/saalfeldlab/n5/XzCompression.java
index 5204e799..d2c1ce3d 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/XzCompression.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/XzCompression.java
@@ -32,8 +32,10 @@
 import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
 import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
 import org.janelia.saalfeldlab.n5.Compression.CompressionType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
 
 @CompressionType("xz")
+@NameConfig.Name("xz")
 public class XzCompression implements DefaultBlockReader, DefaultBlockWriter, Compression {
 
 	private static final long serialVersionUID = -7272153943564743774L;
@@ -52,17 +54,29 @@ public XzCompression() {
 	}
 
 	@Override
-	public InputStream getInputStream(final InputStream in) throws IOException {
+	public InputStream decode(final InputStream in) throws IOException {
 
 		return new XZCompressorInputStream(in);
 	}
 
 	@Override
-	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+	public InputStream getInputStream(final InputStream in) throws IOException {
+
+		return decode(in);
+	}
+
+	@Override
+	public OutputStream encode(final OutputStream out) throws IOException {
 
 		return new XZCompressorOutputStream(out, preset);
 	}
 
+	@Override
+	public OutputStream getOutputStream(final OutputStream out) throws IOException {
+
+		return encode(out);
+	}
+
 	@Override
 	public XzCompression getReader() {
 
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/AsTypeCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/AsTypeCodec.java
new file mode 100644
index 00000000..e8883c75
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/AsTypeCodec.java
@@ -0,0 +1,401 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.function.BiConsumer;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+/**
+ * A {@link Codec} that converts between two fixed-length {@link DataType}s,
+ * element by element.
+ */
+@NameConfig.Name(AsTypeCodec.TYPE)
+public class AsTypeCodec implements Codec.BytesCodec {
+
+	private static final long serialVersionUID = 1031322606191894484L;
+
+	public static final String TYPE = "astype";
+
+	protected transient int numBytes;
+	protected transient int numEncodedBytes;
+
+	protected transient BiConsumer<ByteBuffer, ByteBuffer> encoder;
+	protected transient BiConsumer<ByteBuffer, ByteBuffer> decoder;
+
+	@NameConfig.Parameter
+	protected final DataType dataType;
+
+	@NameConfig.Parameter
+	protected final DataType encodedType;
+
+	private AsTypeCodec() {
+
+		this(null, null);
+	}
+
+	public AsTypeCodec(DataType dataType, DataType encodedType) {
+
+		this.dataType = dataType;
+		this.encodedType = encodedType;
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+	public DataType getDataType() {
+
+		return dataType;
+	}
+
+	public DataType getEncodedDataType() {
+
+		return encodedType;
+	}
+
+	@Override
+	public InputStream decode(InputStream in) throws IOException {
+
+		numBytes = bytes(dataType);
+		numEncodedBytes = bytes(encodedType);
+
+		encoder = converter(dataType, encodedType);
+		decoder = converter(encodedType, dataType);
+
+		return new FixedLengthConvertedInputStream(numEncodedBytes, numBytes, decoder, in);
+	}
+
+	@Override
+	public OutputStream encode(OutputStream out) throws IOException {
+
+		numBytes = bytes(dataType);
+		numEncodedBytes = bytes(encodedType);
+
+		encoder = converter(dataType, encodedType);
+		decoder = converter(encodedType, dataType);
+
+		return new FixedLengthConvertedOutputStream(numBytes, numEncodedBytes, encoder, out);
+	}
+
+	public static int bytes(DataType type) {
+
+		switch (type) {
+		case UINT8:
+		case INT8:
+			return 1;
+		case UINT16:
+		case INT16:
+			return 2;
+		case UINT32:
+		case INT32:
+		case FLOAT32:
+			return 4;
+		case UINT64:
+		case INT64:
+		case FLOAT64:
+			return 8;
+		default:
+			return -1;
+		}
+	}
+
+	public static BiConsumer<ByteBuffer, ByteBuffer> converter(final DataType from, final DataType to) {
+
+		// TODO fill this out
+
+		if (from == to)
+			return AsTypeCodec::IDENTITY;
+		else if (from == DataType.INT8) {
+
+			if( to == DataType.INT16 )
+				return AsTypeCodec::BYTE_TO_SHORT;
+			else if( to == DataType.INT32 )
+				return AsTypeCodec::BYTE_TO_INT;
+			else if( to == DataType.INT64 )
+				return AsTypeCodec::BYTE_TO_LONG;
+			else if( to == DataType.FLOAT32 )
+				return AsTypeCodec::BYTE_TO_FLOAT;
+			else if( to == DataType.FLOAT64 )
+				return AsTypeCodec::BYTE_TO_DOUBLE;
+
+		} else if (from == DataType.INT16) {
+
+			if (to == DataType.INT8)
+				return AsTypeCodec::SHORT_TO_BYTE;
+			else if (to == DataType.INT32)
+				return AsTypeCodec::SHORT_TO_INT;
+			else if (to == DataType.INT64)
+				return AsTypeCodec::SHORT_TO_LONG;
+			else if (to == DataType.FLOAT32)
+				return AsTypeCodec::SHORT_TO_FLOAT;
+			else if (to == DataType.FLOAT64)
+				return AsTypeCodec::SHORT_TO_DOUBLE;
+
+		} else if (from == DataType.INT32) {
+
+			if (to == DataType.INT8)
+				return AsTypeCodec::INT_TO_BYTE;
+			else if (to == DataType.INT16)
+				return AsTypeCodec::INT_TO_SHORT;
+			else if (to == DataType.INT64)
+				return AsTypeCodec::INT_TO_LONG;
+			else if (to == DataType.FLOAT32)
+				return AsTypeCodec::INT_TO_FLOAT;
+			else if (to == DataType.FLOAT64)
+				return AsTypeCodec::INT_TO_DOUBLE;
+
+		} else if (from == DataType.INT64) {
+
+			if (to == DataType.INT8)
+				return AsTypeCodec::LONG_TO_BYTE;
+			else if (to == DataType.INT16)
+				return AsTypeCodec::LONG_TO_SHORT;
+			else if (to == DataType.INT32)
+				return AsTypeCodec::LONG_TO_INT;
+			else if (to == DataType.FLOAT32)
+				return AsTypeCodec::LONG_TO_FLOAT;
+			else if (to == DataType.FLOAT64)
+				return AsTypeCodec::LONG_TO_DOUBLE;
+
+		} else if (from == DataType.FLOAT32) {
+
+			if (to == DataType.INT8)
+				return AsTypeCodec::FLOAT_TO_BYTE;
+			else if (to == DataType.INT16)
+				return AsTypeCodec::FLOAT_TO_SHORT;
+			else if (to == DataType.INT32)
+				return AsTypeCodec::FLOAT_TO_INT;
+			else if (to == DataType.INT64)
+				return AsTypeCodec::FLOAT_TO_LONG;
+			else if (to == DataType.FLOAT64)
+				return AsTypeCodec::FLOAT_TO_DOUBLE;
+
+		} else if (from == DataType.FLOAT64) {
+
+			if (to == DataType.INT8)
+				return AsTypeCodec::DOUBLE_TO_BYTE;
+			else if (to == DataType.INT16)
+				return AsTypeCodec::DOUBLE_TO_SHORT;
+			else if (to == DataType.INT32)
+				return AsTypeCodec::DOUBLE_TO_INT;
+			else if (to == DataType.INT64)
+				return AsTypeCodec::DOUBLE_TO_LONG;
+			else if (to == DataType.FLOAT32)
+				return AsTypeCodec::DOUBLE_TO_FLOAT;
+		}
+
+		return AsTypeCodec::IDENTITY;
+	}
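+
+	// Each converter consumes one element from its source buffer and writes one
+	// converted element to its target buffer, e.g.
+	//
+	//   final BiConsumer<ByteBuffer, ByteBuffer> c = converter(DataType.INT32, DataType.INT16);
+	//   // reads 4 bytes from the source buffer, writes the low 2 bytes to the target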
+
+	public static final void IDENTITY(final ByteBuffer x, final ByteBuffer y) {
+
+		for (int i = 0; i < y.capacity(); i++)
+			y.put(x.get());
+	}
+
+	public static final void IDENTITY_ONE(final ByteBuffer x, final ByteBuffer y) {
+
+		y.put(x.get());
+	}
+
+	public static final void BYTE_TO_SHORT(final ByteBuffer b, final ByteBuffer s) {
+
+		final byte zero = 0;
+		s.put(zero);
+		s.put(b.get());
+	}
+
+	public static final void BYTE_TO_INT(final ByteBuffer b, final ByteBuffer i) {
+
+		final byte zero = 0;
+		i.put(zero);
+		i.put(zero);
+		i.put(zero);
+		i.put(b.get());
+	}
+
+	public static final void BYTE_TO_LONG(final ByteBuffer b, final ByteBuffer l) {
+
+		final byte zero = 0;
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(b.get());
+	}
+
+	public static final void BYTE_TO_FLOAT(final ByteBuffer b, final ByteBuffer f) {
+
+		f.putFloat((float)b.get());
+	}
+
+	public static final void BYTE_TO_DOUBLE(final ByteBuffer b, final ByteBuffer d) {
+
+		d.putDouble((double)b.get());
+	}
+
+	public static final void SHORT_TO_BYTE(final ByteBuffer s, final ByteBuffer b) {
+
+		// keep only the low byte of the big-endian short
+		s.get();
+		b.put(s.get());
+	}
+
+	public static final void SHORT_TO_INT(final ByteBuffer s, final ByteBuffer i) {
+
+		final byte zero = 0;
+		i.put(zero);
+		i.put(zero);
+		i.put(s.get());
+		i.put(s.get());
+	}
+
+	public static final void SHORT_TO_LONG(final ByteBuffer s, final ByteBuffer l) {
+
+		final byte zero = 0;
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(s.get());
+		l.put(s.get());
+	}
+
+	public static final void SHORT_TO_FLOAT(final ByteBuffer s, final ByteBuffer f) {
+
+		f.putFloat((float)s.getShort());
+	}
+
+	public static final void SHORT_TO_DOUBLE(final ByteBuffer s, final ByteBuffer d) {
+
+		d.putDouble((double)s.getShort());
+	}
+
+	public static final void INT_TO_BYTE(final ByteBuffer i, final ByteBuffer b) {
+
+		b.put(i.get(3));
+	}
+
+	public static final void INT_TO_SHORT(final ByteBuffer i, final ByteBuffer s) {
+
+		s.put(i.get(2));
+		s.put(i.get(3));
+	}
+
+	public static final void INT_TO_LONG(final ByteBuffer i, final ByteBuffer l) {
+
+		final byte zero = 0;
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(zero);
+		l.put(i.get());
+		l.put(i.get());
+		l.put(i.get());
+		l.put(i.get());
+	}
+
+	public static final void INT_TO_FLOAT(final ByteBuffer i, final ByteBuffer f) {
+
+		f.putFloat((float)i.getInt());
+	}
+
+	public static final void INT_TO_DOUBLE(final ByteBuffer i, final ByteBuffer f) {
+
+		f.putDouble((double)i.getInt());
+	}
+
+	public static final void LONG_TO_BYTE(final ByteBuffer l, final ByteBuffer b) {
+
+		b.put((byte)l.getLong());
+	}
+
+	public static final void LONG_TO_SHORT(final ByteBuffer l, final ByteBuffer s) {
+
+		s.putShort((short)l.getLong());
+	}
+
+	public static final void LONG_TO_INT(final ByteBuffer l, final ByteBuffer i) {
+
+		i.putInt((int)l.getLong());
+	}
+
+	public static final void LONG_TO_FLOAT(final ByteBuffer l, final ByteBuffer f) {
+
+		f.putFloat((float)l.getLong());
+	}
+
+	public static final void LONG_TO_DOUBLE(final ByteBuffer l, final ByteBuffer f) {
+
+		f.putDouble((double)l.getLong());
+	}
+
+	public static final void FLOAT_TO_BYTE(final ByteBuffer f, final ByteBuffer b) {
+
+		b.put((byte)f.getFloat());
+	}
+
+	public static final void FLOAT_TO_SHORT(final ByteBuffer f, final ByteBuffer s) {
+
+		s.putShort((short)f.getFloat());
+	}
+
+	public static final void FLOAT_TO_INT(final ByteBuffer f, final ByteBuffer i) {
+
+		i.putInt((int)f.getFloat());
+	}
+
+	public static final void FLOAT_TO_LONG(final ByteBuffer f, final ByteBuffer l) {
+
+		l.putLong((long)f.getFloat());
+	}
+
+	public static final void FLOAT_TO_DOUBLE(final ByteBuffer f, final ByteBuffer d) {
+
+		d.putDouble((double)f.getFloat());
+	}
+
+	public static final void DOUBLE_TO_BYTE(final ByteBuffer d, final ByteBuffer b) {
+
+		b.put((byte)d.getDouble());
+	}
+
+	public static final void DOUBLE_TO_SHORT(final ByteBuffer d, final ByteBuffer s) {
+
+		s.putShort((short)d.getDouble());
+	}
+
+	public static final void DOUBLE_TO_INT(final ByteBuffer d, final ByteBuffer i) {
+
+		i.putInt((int)d.getDouble());
+	}
+
+	public static final void DOUBLE_TO_LONG(final ByteBuffer d, final ByteBuffer l) {
+
+		l.putLong((long)d.getDouble());
+	}
+
+	public static final void DOUBLE_TO_FLOAT(final ByteBuffer d, final ByteBuffer f) {
+
+		f.putFloat((float)d.getDouble());
+	}
+
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/Codec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/Codec.java
new file mode 100644
index 00000000..ed544257
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/Codec.java
@@ -0,0 +1,180 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.io.UncheckedIOException;
+import java.util.Arrays;
+
+import org.apache.commons.io.input.ProxyInputStream;
+import org.apache.commons.io.output.ProxyOutputStream;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.LockedChannel;
+import org.janelia.saalfeldlab.n5.N5Exception;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+/**
+ * Interface representing a filter that can encode {@link OutputStream}s when
+ * writing data and decode {@link InputStream}s when reading data.
+ * <p>
+ * Modeled after <a href="https://zarr.readthedocs.io/en/v2.0.1/api/codecs.html">Filters</a> in
+ * Zarr.
+ */
+@NameConfig.Prefix("codec")
+public interface Codec extends Serializable {
+
+	static OutputStream encode(OutputStream out, Codec.BytesCodec... bytesCodecs) throws IOException {
+		OutputStream stream = out;
+		for (final BytesCodec codec : bytesCodecs)
+			stream = codec.encode(stream);
+
+		return stream;
+	}
+
+	static InputStream decode(InputStream in, Codec.BytesCodec... bytesCodecs) throws IOException {
+		InputStream stream = in;
+		for (final BytesCodec codec : bytesCodecs)
+			stream = codec.decode(stream);
+
+		return stream;
+	}
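+
+	// Chaining sketch: each codec wraps the stream returned by the previous one,
+	// so the last codec in the array becomes the outermost wrapper. Assuming
+	// Compression implements BytesCodec, as elsewhere in this changeset:
+	//
+	//   final OutputStream encoded = Codec.encode(rawOut, new GzipCompression(), new IdentityCodec());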
+
+	interface BytesCodec extends Codec {
+
+		/**
+		 * Decode an {@link InputStream}.
+		 *
+		 * @param in input stream
+		 * @return the decoded input stream
+		 */
+		InputStream decode(final InputStream in) throws IOException;
+
+		/**
+		 * Encode an {@link OutputStream}.
+		 *
+		 * @param out the output stream
+		 * @return the encoded output stream
+		 */
+		OutputStream encode(final OutputStream out) throws IOException;
+	}
+
+	interface ArrayCodec extends DeterministicSizeCodec {
+
+		default long[] getPositionForBlock(final DatasetAttributes attributes, final DataBlock<?> datablock) {
+
+			return datablock.getGridPosition();
+		}
+
+		default long[] getPositionForBlock(final DatasetAttributes attributes, final long... blockPosition) {
+
+			return blockPosition;
+		}
+
+		/**
+		 * Decode an {@link InputStream}.
+		 *
+		 * @param in input stream
+		 * @return the DataBlock corresponding to the input stream
+		 */
+		DataBlockInputStream decode(
+				final DatasetAttributes attributes,
+				final long[] gridPosition,
+				final InputStream in) throws IOException;
+
+		/**
+		 * Encode a {@link DataBlock}.
+		 *
+		 * @param datablock the datablock to encode
+		 */
+		DataBlockOutputStream encode(
+				final DatasetAttributes attributes,
+				final DataBlock<?> datablock,
+				final OutputStream out) throws IOException;
+
+		@Override default long encodedSize(long size) {
+
+			return size;
+		}
+
+		@Override default long decodedSize(long size) {
+
+			return size;
+		}
+
+		default <T> void writeBlock(
+				final KeyValueAccess kva,
+				final String keyPath,
+				final DatasetAttributes datasetAttributes,
+				final DataBlock<T> dataBlock) {
+
+			try (final LockedChannel lock = kva.lockForWriting(keyPath)) {
+				try (final OutputStream out = lock.newOutputStream()) {
+					final DataBlockOutputStream dataBlockOutput = encode(datasetAttributes, dataBlock, out);
+					try (final OutputStream stream = Codec.encode(dataBlockOutput, datasetAttributes.getCodecs())) {
+						dataBlock.writeData(dataBlockOutput.getDataOutput(stream));
+					}
+				}
+			} catch (final IOException | UncheckedIOException e) {
+				final String msg = "Failed to write block " + Arrays.toString(dataBlock.getGridPosition()) + " into dataset " + keyPath;
+				throw new N5Exception.N5IOException(msg, e);
+			}
+		}
+
+		default <T> DataBlock<T> readBlock(
+				final KeyValueAccess kva,
+				final String keyPath,
+				final DatasetAttributes datasetAttributes,
+				final long[] gridPosition) {
+
+			try (final LockedChannel lockedChannel = kva.lockForReading(keyPath)) {
+				try(final InputStream in = lockedChannel.newInputStream()) {
+
+					final BytesCodec[] codecs = datasetAttributes.getCodecs();
+					final ArrayCodec arrayCodec = datasetAttributes.getArrayCodec();
+					final DataBlockInputStream dataBlockStream = arrayCodec.decode(datasetAttributes, gridPosition, in);
+					InputStream stream = Codec.decode(dataBlockStream, codecs);
+
+					final DataBlock<T> dataBlock = dataBlockStream.allocateDataBlock();
+					dataBlock.readData(dataBlockStream.getDataInput(stream));
+					stream.close();
+
+					return dataBlock;
+				}
+			} catch (final N5Exception.N5NoSuchKeyException e) {
+				return null;
+			} catch (final IOException | UncheckedIOException e) {
+				final String msg = "Failed to read block " + Arrays.toString(gridPosition) + " from dataset " + keyPath;
+				throw new N5Exception.N5IOException(msg, e);
+			}
+		}
+	}
+
+	abstract class DataBlockInputStream extends ProxyInputStream {
+
+		protected DataBlockInputStream(InputStream in) {
+
+			super(in);
+		}
+
+		public abstract <T> DataBlock<T> allocateDataBlock() throws IOException;
+
+		public abstract DataInput getDataInput(final InputStream inputStream);
+	}
+
+	abstract class DataBlockOutputStream extends ProxyOutputStream {
+
+		protected DataBlockOutputStream(final OutputStream out) {
+
+			super(out);
+		}
+
+		public abstract DataOutput getDataOutput(final OutputStream outputStream);
+	}
+
+	String getType();
+}
+
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/DeterministicSizeCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/DeterministicSizeCodec.java
new file mode 100644
index 00000000..9ac0a1fe
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/DeterministicSizeCodec.java
@@ -0,0 +1,13 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+/**
+ * A {@link Codec} that can deterministically compute the size of its encoded
+ * data from the raw data size, and vice versa, from the length alone (i.e. the
+ * encoding is data-independent).
+ */
+public interface DeterministicSizeCodec extends Codec {
+
+	public abstract long encodedSize(long size);
+
+	public abstract long decodedSize(long size);
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedInputStream.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedInputStream.java
new file mode 100644
index 00000000..78d6313a
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedInputStream.java
@@ -0,0 +1,71 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.function.BiConsumer;
+
+/**
+ * An {@link InputStream} that converts between two fixed-length types.
+ */
+public class FixedLengthConvertedInputStream extends InputStream {
+
+	private final int numBytes;
+	private final int numBytesAfterDecoding;
+
+	private final byte[] raw;
+	private final byte[] decoded;
+
+	private final ByteBuffer rawBuffer;
+	private final ByteBuffer decodedBuffer;
+
+	private final InputStream src;
+
+	private BiConsumer<ByteBuffer, ByteBuffer> converter;
+
+	private int incrementalBytesRead;
+
+	public FixedLengthConvertedInputStream(
+			final int numBytes,
+			final int numBytesAfterDecoding,
+			BiConsumer<ByteBuffer, ByteBuffer> converter,
+			final InputStream src ) {
+
+		this.numBytes = numBytes;
+		this.numBytesAfterDecoding = numBytesAfterDecoding;
+		this.converter = converter;
+
+		raw = new byte[numBytes];
+		decoded = new byte[numBytesAfterDecoding];
+		incrementalBytesRead = 0;
+
+		rawBuffer = ByteBuffer.wrap(raw);
+		decodedBuffer = ByteBuffer.wrap(decoded);
+
+		this.src = src;
+	}
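+
+	// Usage sketch: view a stream of big-endian int32 values as int64 values,
+	// using a converter defined in AsTypeCodec.
+	//
+	//   final InputStream longs = new FixedLengthConvertedInputStream(
+	//   		4, 8, AsTypeCodec::INT_TO_LONG, intStream);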
+
+	@Override
+	public int read() throws IOException {
+
+		if (incrementalBytesRead == 0) {
+
+			rawBuffer.rewind();
+			decodedBuffer.rewind();
+
+			// read one full element, signaling EOF if the source is exhausted
+			for (int i = 0; i < numBytes; i++) {
+				final int b = src.read();
+				if (b < 0)
+					return -1;
+				raw[i] = (byte)b;
+			}
+
+			converter.accept(rawBuffer, decodedBuffer);
+		}
+
+		// mask to return an unsigned byte value, per the InputStream contract
+		final int out = decoded[incrementalBytesRead++] & 0xff;
+		if (incrementalBytesRead == numBytesAfterDecoding)
+			incrementalBytesRead = 0;
+
+		return out;
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedOutputStream.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedOutputStream.java
new file mode 100644
index 00000000..87544fc7
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedLengthConvertedOutputStream.java
@@ -0,0 +1,64 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.function.BiConsumer;
+
+/**
+ * An {@link OutputStream} that converts between two fixed-length types.
+ */
+public class FixedLengthConvertedOutputStream extends OutputStream {
+
+	private final int numBytes;
+
+	private final byte[] raw;
+	private final byte[] encoded;
+
+	private final ByteBuffer rawBuffer;
+	private final ByteBuffer encodedBuffer;
+
+	private final OutputStream src;
+
+	private BiConsumer<ByteBuffer, ByteBuffer> converter;
+
+	private int incrementalBytesWritten;
+
+	public FixedLengthConvertedOutputStream(
+			final int numBytes,
+			final int numBytesAfterEncoding,
+			final BiConsumer<ByteBuffer, ByteBuffer> converter,
+			final OutputStream src ) {
+
+		this.numBytes = numBytes;
+		this.converter = converter;
+
+		raw = new byte[numBytes];
+		encoded = new byte[numBytesAfterEncoding];
+
+		rawBuffer = ByteBuffer.wrap(raw);
+		encodedBuffer = ByteBuffer.wrap(encoded);
+
+		incrementalBytesWritten = 0;
+
+		this.src = src;
+	}
+
+	@Override
+	public void write(int b) throws IOException {
+
+		raw[incrementalBytesWritten++] = (byte)b;
+
+		// convert and write out the encoded bytes after collecting numBytes bytes
+		if (incrementalBytesWritten == numBytes) {
+
+			rawBuffer.rewind();
+			encodedBuffer.rewind();
+
+			converter.accept(rawBuffer, encodedBuffer);
+			src.write(encoded);
+			incrementalBytesWritten = 0;
+		}
+	}
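+
+	// Usage sketch: narrow int32 samples to int16 on the way out, using a
+	// converter defined in AsTypeCodec.
+	//
+	//   final OutputStream shorts = new FixedLengthConvertedOutputStream(
+	//   		4, 2, AsTypeCodec::INT_TO_SHORT, rawOut);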
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetCodec.java
new file mode 100644
index 00000000..e6c83163
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetCodec.java
@@ -0,0 +1,114 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.function.BiConsumer;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+/**
+ * A {@link Codec} that applies a linear transform y = scale * x + offset to
+ * each element on encoding and inverts it on decoding, converting between
+ * {@code dataType} and {@code encodedType}.
+ */
+@NameConfig.Name(FixedScaleOffsetCodec.TYPE)
+public class FixedScaleOffsetCodec extends AsTypeCodec {
+
+	private static final long serialVersionUID = 8024945290803548528L;
+
+	public static final String TYPE = "fixedscaleoffset";
+
+	@NameConfig.Parameter
+	protected final double scale;
+
+	@NameConfig.Parameter
+	protected final double offset;
+
+	private transient ByteBuffer tmpEncoder;
+	private transient ByteBuffer tmpDecoder;
+
+	public transient BiConsumer<ByteBuffer, ByteBuffer> encoder;
+	public transient BiConsumer<ByteBuffer, ByteBuffer> encoderPre;
+	public transient BiConsumer<ByteBuffer, ByteBuffer> encoderPost;
+	public transient BiConsumer<ByteBuffer, ByteBuffer> decoder;
+	public transient BiConsumer<ByteBuffer, ByteBuffer> decoderPre;
+	public transient BiConsumer<ByteBuffer, ByteBuffer> decoderPost;
+
+	private FixedScaleOffsetCodec() {
+
+		this(1, 0, null, null);
+	}
+
+	public FixedScaleOffsetCodec(final double scale, final double offset, DataType type, DataType encodedType) {
+
+		super(type, encodedType);
+		this.scale = scale;
+		this.offset = offset;
+
+		tmpEncoder = ByteBuffer.wrap(new byte[Double.BYTES]);
+		tmpDecoder = ByteBuffer.wrap(new byte[Double.BYTES]);
+
+		// encoder goes from type to encoded type
+		encoderPre = converter(type, DataType.FLOAT64);
+		encoderPost = converter(DataType.FLOAT64, encodedType);
+
+		// decoder goes from encoded type to type
+		decoderPre = converter(encodedType, DataType.FLOAT64);
+		decoderPost = converter(DataType.FLOAT64, type);
+
+		// convert from i type to double, apply scale and offset, then convert to type o
+		encoder = (i, o) -> {
+			tmpEncoder.rewind();
+			encoderPre.accept(i, tmpEncoder);
+			tmpEncoder.rewind();
+			final double x = tmpEncoder.getDouble();
+			tmpEncoder.rewind();
+			tmpEncoder.putDouble(scale * x + offset);
+			tmpEncoder.rewind();
+			encoderPost.accept(tmpEncoder, o);
+		};
+
+		// convert from i type to double, apply the inverse map (x - offset) / scale, then convert to type o
+		decoder = (i, o) -> {
+			tmpDecoder.rewind();
+			decoderPre.accept(i, tmpDecoder);
+			tmpDecoder.rewind();
+			final double x = tmpDecoder.getDouble();
+			tmpDecoder.rewind();
+			tmpDecoder.putDouble((x - offset) / scale);
+			tmpDecoder.rewind();
+			decoderPost.accept(tmpDecoder, o);
+		};
+	}
+
+	public double getScale() {
+
+		return scale;
+	}
+
+	public double getOffset() {
+
+		return offset;
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+	@Override
+	public InputStream decode(InputStream in) throws IOException {
+
+		numBytes = bytes(dataType);
+		numEncodedBytes = bytes(encodedType);
+		return new FixedLengthConvertedInputStream(numEncodedBytes, numBytes, this.decoder, in);
+	}
+
+	@Override
+	public OutputStream encode(OutputStream out) throws IOException {
+
+		numBytes = bytes(dataType);
+		numEncodedBytes = bytes(encodedType);
+		return new FixedLengthConvertedOutputStream(numBytes, numEncodedBytes, this.encoder, out);
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/IdentityCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/IdentityCodec.java
new file mode 100644
index 00000000..93a384dd
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/IdentityCodec.java
@@ -0,0 +1,34 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
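+/**
+ * A no-op {@link Codec.BytesCodec} that returns its input and output streams
+ * unchanged.
+ */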
+@NameConfig.Name(IdentityCodec.TYPE)
+public class IdentityCodec implements Codec.BytesCodec {
+
+	private static final long serialVersionUID = 8354269325800855621L;
+
+	public static final String TYPE = "id";
+
+	@Override
+	public InputStream decode(InputStream in) throws IOException {
+
+		return in;
+	}
+
+	@Override
+	public OutputStream encode(OutputStream out) throws IOException {
+
+		return out;
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/N5BlockCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/N5BlockCodec.java
new file mode 100644
index 00000000..6b83f466
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/N5BlockCodec.java
@@ -0,0 +1,202 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteOrder;
+
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+import com.google.common.io.LittleEndianDataInputStream;
+import com.google.common.io.LittleEndianDataOutputStream;
+
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
+
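+/**
+ * The default N5 binary block format: a header (a {@code short} mode; unless
+ * the mode is {@code MODE_OBJECT}, a {@code short} dimension count followed by
+ * one {@code int} per dimension; for var-length and object modes, an
+ * {@code int} element count) followed by the payload, written in the
+ * configured byte order.
+ */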
+@NameConfig.Name(value = N5BlockCodec.TYPE)
+public class N5BlockCodec implements Codec.ArrayCodec {
+
+	private static final long serialVersionUID = 3523505403978222360L;
+
+	public static final String TYPE = "n5bytes";
+	public static final int MODE_DEFAULT = 0;
+	public static final int MODE_VARLENGTH = 1;
+	public static final int MODE_OBJECT = 2;
+
+	@Nullable
+	@NameConfig.Parameter(value = "endian", optional = true)
+	protected final ByteOrder byteOrder;
+
+	public N5BlockCodec() {
+
+		this(ByteOrder.BIG_ENDIAN);
+	}
+
+	public N5BlockCodec(@Nullable final ByteOrder byteOrder) {
+
+		this.byteOrder = byteOrder;
+	}
+
+	/**
+	 * ByteOrder used to encode/decode this block of data.<br>
+	 * Will be {@code null} when {@link DatasetAttributes#getDataType()} refers to a single-byte type.
+	 *
+	 * @return the byte order for this codec
+	 */
+	@CheckForNull
+	public ByteOrder getByteOrder() {
+		return byteOrder;
+	}
+
+	@Override public DataBlockInputStream decode(final DatasetAttributes attributes, final long[] gridPosition, InputStream in) throws IOException {
+
+		return new N5DataBlockInputStream(in, attributes, gridPosition, byteOrder);
+	}
+
+
+	@Override
+	public DataBlockOutputStream encode(final DatasetAttributes attributes, final DataBlock<?> dataBlock,
+			final OutputStream out)
+			throws IOException {
+
+		return new N5DataBlockOutputStream(out, attributes, dataBlock, byteOrder);
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+	private static class N5DataBlockOutputStream extends DataBlockOutputStream {
+
+		private final DatasetAttributes attributes;
+		private final DataBlock<?> dataBlock;
+		private final ByteOrder byteOrder;
+		boolean start = true;
+
+
+		public N5DataBlockOutputStream(final OutputStream out, final DatasetAttributes attributes, final DataBlock<?> dataBlock, ByteOrder byteOrder) {
+			super(out);
+			this.attributes = attributes;
+			this.dataBlock = dataBlock;
+			this.byteOrder = byteOrder;
+		}
+
+		@Override
+		protected void beforeWrite(int n) throws IOException {
+
+			if (start) {
+				writeHeader();
+				start = false;
+			}
+		}
+
+		private void writeHeader() throws IOException {
+			final DataOutput dos = getDataOutput(out);
+
+			final int mode;
+			if (attributes.getDataType() == DataType.OBJECT || dataBlock.getSize() == null)
+				mode = MODE_OBJECT;
+			else if (dataBlock.getNumElements() == DataBlock.getNumElements(dataBlock.getSize()))
+				mode = MODE_DEFAULT;
+			else
+				mode = MODE_VARLENGTH;
+
+			dos.writeShort(mode);
+
+			if (mode != MODE_OBJECT) {
+				dos.writeShort(attributes.getNumDimensions());
+				for (final int size : dataBlock.getSize())
+					dos.writeInt(size);
+			}
+
+			if (mode != MODE_DEFAULT)
+				dos.writeInt(dataBlock.getNumElements());
+		}
+
+		@Override
+		public DataOutput getDataOutput(final OutputStream outputStream) {
+
+			// byteOrder is null for single-byte types, where endianness is irrelevant
+			if (byteOrder == null || byteOrder.equals(ByteOrder.BIG_ENDIAN))
+				return new DataOutputStream(outputStream);
+			else
+				return new LittleEndianDataOutputStream(outputStream);
+		}
+	}
+
+	private static class N5DataBlockInputStream extends DataBlockInputStream {
+
+		private final DatasetAttributes attributes;
+		private final long[] gridPosition;
+		private final ByteOrder byteOrder;
+
+		private short mode = -1;
+		private int[] blockSize = null;
+		private int numElements = -1;
+
+		private boolean start = true;
+
+		N5DataBlockInputStream(final InputStream in, final DatasetAttributes attributes, final long[] gridPosition, ByteOrder byteOrder) {
+			super(in);
+			this.attributes = attributes;
+			this.gridPosition = gridPosition;
+			this.byteOrder = byteOrder;
+		}
+
+		@Override
+		protected void beforeRead(int n) throws IOException {
+
+			if (start) {
+				readHeader();
+				start = false;
+			}
+		}
+
+		@Override
+		public <T> DataBlock<T> allocateDataBlock() throws IOException {
+
+			if (start) {
+				readHeader();
+				start = false;
+			}
+			if (mode == MODE_OBJECT) {
+				return (DataBlock<T>) attributes.getDataType().createDataBlock(null, gridPosition, numElements);
+			}
+			return (DataBlock<T>) attributes.getDataType().createDataBlock(blockSize, gridPosition, numElements);
+		}
+
+		private void readHeader() throws IOException {
+
+			final DataInput dis = getDataInput(in);
+			mode = dis.readShort();
+			if (mode == MODE_OBJECT) {
+				numElements = dis.readInt();
+				return;
+			}
+
+			final int nDim = dis.readShort();
+			blockSize = new int[nDim];
+			for (int d = 0; d < nDim; ++d)
+				blockSize[d] = dis.readInt();
+			if (mode == MODE_DEFAULT) {
+				numElements = DataBlock.getNumElements(blockSize);
+			} else {
+				numElements = dis.readInt();
+			}
+		}
+
+		@Override
+		public DataInput getDataInput(final InputStream inputStream) {
+
+			// byteOrder is null for single-byte types, where endianness is irrelevant
+			if (byteOrder == null || byteOrder.equals(ByteOrder.BIG_ENDIAN))
+				return new DataInputStream(inputStream);
+			else
+				return new LittleEndianDataInputStream(inputStream);
+		}
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/RawBytes.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/RawBytes.java
new file mode 100644
index 00000000..bb3232e4
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/RawBytes.java
@@ -0,0 +1,139 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+import com.google.common.io.LittleEndianDataInputStream;
+import com.google.common.io.LittleEndianDataOutputStream;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonPrimitive;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+
+import javax.annotation.Nullable;
+
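+/**
+ * An array codec that serializes a block as its raw in-memory bytes in the
+ * configured byte order (little-endian by default), with no header.
+ */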
+@NameConfig.Name(value = RawBytes.TYPE)
+public class RawBytes implements Codec.ArrayCodec {
+
+	private static final long serialVersionUID = 3282569607795127005L;
+
+	public static final String TYPE = "bytes";
+
+	@NameConfig.Parameter(value = "endian", optional = true)
+	protected final ByteOrder byteOrder;
+
+	public RawBytes() {
+
+		this(ByteOrder.LITTLE_ENDIAN);
+	}
+
+	public RawBytes(final ByteOrder byteOrder) {
+
+		this.byteOrder = byteOrder;
+	}
+
+	@Nullable
+	public ByteOrder getByteOrder() {
+		return byteOrder;
+	}
+
+	@Override
+	public DataBlockInputStream decode(final DatasetAttributes attributes, final long[] gridPosition, InputStream in)
+			throws IOException {
+
+		return new DataBlockInputStream(in) {
+
+			private int[] blockSize = attributes.getBlockSize();
+			private int numElements = Arrays.stream(blockSize).reduce(1, (x, y) -> x * y);
+
+			@Override
+			protected void beforeRead(int n) {}
+
+			@Override
+			public DataBlock<?> allocateDataBlock() {
+
+				return attributes.getDataType().createDataBlock(blockSize, gridPosition, numElements);
+			}
+
+			@Override
+			public DataInput getDataInput(final InputStream inputStream) {
+
+				// null-safe: a null byteOrder falls through to the little-endian default
+				if (ByteOrder.BIG_ENDIAN.equals(byteOrder))
+					return new DataInputStream(inputStream);
+
+				return new LittleEndianDataInputStream(inputStream);
+			}
+
+		};
+	}
+
+	@Override
+	public DataBlockOutputStream encode(final DatasetAttributes attributes, final DataBlock<?> dataBlock,
+			final OutputStream out)
+			throws IOException {
+
+		return new DataBlockOutputStream(out) {
+
+			@Override
+			protected void beforeWrite(int n) throws IOException {}
+
+			@Override
+			public DataOutput getDataOutput(OutputStream outputStream) {
+
+				// null-safe: a null byteOrder falls through to the little-endian default
+				if (ByteOrder.BIG_ENDIAN.equals(byteOrder))
+					return new DataOutputStream(outputStream);
+				else
+					return new LittleEndianDataOutputStream(outputStream);
+			}
+		};
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+	public static final ByteOrderAdapter byteOrderAdapter = new ByteOrderAdapter();
+
+	public static class ByteOrderAdapter implements JsonDeserializer<ByteOrder>, JsonSerializer<ByteOrder> {
+
+		@Override
+		public JsonElement serialize(ByteOrder src, java.lang.reflect.Type typeOfSrc,
+				JsonSerializationContext context) {
+
+			if (src.equals(ByteOrder.LITTLE_ENDIAN))
+				return new JsonPrimitive("little");
+			else
+				return new JsonPrimitive("big");
+		}
+
+		@Override
+		public ByteOrder deserialize(JsonElement json, java.lang.reflect.Type typeOfT,
+				JsonDeserializationContext context) throws JsonParseException {
+
+			if (json.getAsString().equals("little"))
+				return ByteOrder.LITTLE_ENDIAN;
+			if (json.getAsString().equals("big"))
+				return ByteOrder.BIG_ENDIAN;
+
+			throw new JsonParseException("unknown byte order: " + json.getAsString());
+		}
+
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumCodec.java
new file mode 100644
index 00000000..7d7a58fb
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumCodec.java
@@ -0,0 +1,119 @@
+package org.janelia.saalfeldlab.n5.codec.checksum;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.zip.CheckedInputStream;
+import java.util.zip.CheckedOutputStream;
+import java.util.zip.Checksum;
+
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.Codec.BytesCodec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+
+/**
+ * A {@link Codec} that appends a checksum to data when encoding and can validate against that checksum when decoding.
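+ * <p>
+ * A minimal sketch of the write path, assuming a concrete subclass such as
+ * {@code Crc32cChecksumCodec} and caller-provided {@code payload} and
+ * {@code rawOutputStream}: bytes are streamed through the returned
+ * {@link CheckedOutputStream}, and the checksum is appended when it is closed.
+ *
+ * <pre>{@code
+ * ChecksumCodec codec = new Crc32cChecksumCodec();
+ * try (OutputStream out = codec.encode(rawOutputStream)) {
+ * 	out.write(payload); // checksum bytes are appended on close()
+ * }
+ * }</pre>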
+ */
+public abstract class ChecksumCodec implements BytesCodec, DeterministicSizeCodec {
+
+	private static final long serialVersionUID = 3141427377277375077L;
+
+	private int numChecksumBytes;
+
+	private Checksum checksum;
+
+	public ChecksumCodec(Checksum checksum, int numChecksumBytes) {
+
+		this.checksum = checksum;
+		this.numChecksumBytes = numChecksumBytes;
+	}
+
+	public Checksum getChecksum() {
+
+		return checksum;
+	}
+
+	public int numChecksumBytes() {
+
+		return numChecksumBytes;
+	}
+
+	@Override
+	public CheckedOutputStream encode(final OutputStream out) throws IOException {
+
+		// when do we validate?
+		return new CheckedOutputStream(out, getChecksum()) {
+
+			private boolean closed = false;
+			@Override public void close() throws IOException {
+
+				if (!closed) {
+					writeChecksum(out);
+					closed = true;
+					out.close();
+				}
+			}
+		};
+	}
+
+	@Override
+	public CheckedInputStream decode(final InputStream in) throws IOException {
+
+		// TODO get the correct expected checksum
+		// TODO write a test with nested checksum codecs
+
+		// has to know the number of bytes it needs to read?
+		return new CheckedInputStream(in, getChecksum());
+	}
+
+	public ByteBuffer decodeAndValidate(final InputStream in, int numBytes) throws IOException, ChecksumException {
+
+		final CheckedInputStream cin = decode(in);
+		final byte[] data = new byte[numBytes];
+		// readFully: a single read() may return fewer than numBytes bytes
+		new DataInputStream(cin).readFully(data);
+
+		if (!valid(in))
+			throw new ChecksumException("Invalid checksum");
+
+		return ByteBuffer.wrap(data);
+	}
+
+	@Override
+	public long encodedSize(final long size) {
+
+		return size + numChecksumBytes();
+	}
+
+	@Override
+	public long decodedSize(final long size) {
+
+		return size - numChecksumBytes();
+	}
+
+	protected boolean valid(InputStream in) throws IOException {
+
+		return readChecksum(in) == getChecksum().getValue();
+	}
+
+	protected long readChecksum(InputStream in) throws IOException {
+
+		final byte[] checksum = new byte[numChecksumBytes()];
+		new DataInputStream(in).readFully(checksum);
+		// right-align the big-endian checksum bytes in a long buffer (assumes
+		// numChecksumBytes() <= 8); calling getLong() on a buffer of fewer than
+		// 8 bytes would underflow
+		final ByteBuffer buf = ByteBuffer.allocate(Long.BYTES);
+		buf.position(Long.BYTES - numChecksumBytes());
+		buf.put(checksum);
+		buf.rewind();
+		return buf.getLong();
+	}
+
+	/**
+	 * Return the value of the checksum as a {@link ByteBuffer} to be serialized.
+	 *
+	 * @return a ByteBuffer representing the checksum value
+	 */
+	public abstract ByteBuffer getChecksumValue();
+
+	public void writeChecksum(OutputStream out) throws IOException {
+
+		out.write(getChecksumValue().array());
+	}
+
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumException.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumException.java
new file mode 100644
index 00000000..034343c4
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/ChecksumException.java
@@ -0,0 +1,12 @@
+package org.janelia.saalfeldlab.n5.codec.checksum;
+
+public class ChecksumException extends Exception {
+
+	private static final long serialVersionUID = 905130066386622561L;
+
+	public ChecksumException(final String message) {
+
+		super(message);
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/Crc32cChecksumCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/Crc32cChecksumCodec.java
new file mode 100644
index 00000000..f7c03608
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/codec/checksum/Crc32cChecksumCodec.java
@@ -0,0 +1,46 @@
+package org.janelia.saalfeldlab.n5.codec.checksum;
+
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+import java.nio.ByteBuffer;
+import java.util.zip.CRC32;
+
+@NameConfig.Name(Crc32cChecksumCodec.TYPE)
+public class Crc32cChecksumCodec extends ChecksumCodec {
+
+	private static final long serialVersionUID = 7424151868725442500L;
+
+	public static final String TYPE = "crc32c";
+
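+	// NOTE: java.util.zip.CRC32 computes plain CRC-32; the codec name "crc32c"
+	// nominally refers to CRC-32C (java.util.zip.CRC32C, available on Java 9+)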
+	public Crc32cChecksumCodec() {
+
+		super(new CRC32(), 4);
+	}
+
+	@Override
+	public long encodedSize(final long size) {
+
+		return size + numChecksumBytes();
+	}
+
+	@Override
+	public long decodedSize(final long size) {
+
+		return size - numChecksumBytes();
+	}
+
+	@Override
+	public ByteBuffer getChecksumValue() {
+
+		final ByteBuffer buf = ByteBuffer.allocate(numChecksumBytes());
+		buf.putInt((int)getChecksum().getValue());
+		return buf;
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/serialization/JsonArrayUtils.java b/src/main/java/org/janelia/saalfeldlab/n5/serialization/JsonArrayUtils.java
new file mode 100644
index 00000000..b65fbb6c
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/serialization/JsonArrayUtils.java
@@ -0,0 +1,20 @@
+package org.janelia.saalfeldlab.n5.serialization;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+
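+/**
+ * Utility for reversing a {@link JsonArray} in place.
+ */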
+public class JsonArrayUtils {
+
+	public static void reverse(final JsonArray array) {
+
+		JsonElement a;
+		final int max = array.size() - 1;
+		for (int i = (max - 1) / 2; i >= 0; --i) {
+			final int j = max - i;
+			a = array.get(i);
+			array.set(i, array.get(j));
+			array.set(j, a);
+		}
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/serialization/N5Annotations.java b/src/main/java/org/janelia/saalfeldlab/n5/serialization/N5Annotations.java
new file mode 100644
index 00000000..500f139f
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/serialization/N5Annotations.java
@@ -0,0 +1,18 @@
+package org.janelia.saalfeldlab.n5.serialization;
+
+import java.io.Serializable;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+public interface N5Annotations extends Serializable {
+
+	@Inherited
+	@Retention(RetentionPolicy.RUNTIME)
+	@Target(ElementType.FIELD)
+	@interface ReverseArray {
+	}
+}
+
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/serialization/NameConfig.java b/src/main/java/org/janelia/saalfeldlab/n5/serialization/NameConfig.java
new file mode 100644
index 00000000..2ccb122e
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/serialization/NameConfig.java
@@ -0,0 +1,44 @@
+package org.janelia.saalfeldlab.n5.serialization;
+
+import org.scijava.annotations.Indexable;
+
+import java.io.Serializable;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
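+/**
+ * Annotations for serializing configurable, named types from their fields: a
+ * type is registered with {@link Name} (optionally {@link Prefix}) and its
+ * configuration fields are marked with {@link Parameter}.
+ */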
+public interface NameConfig extends Serializable {
+
+	@Retention(RetentionPolicy.RUNTIME)
+	@Inherited
+	@Target(ElementType.TYPE)
+	@interface Prefix {
+		String value();
+	}
+
+	@Retention(RetentionPolicy.RUNTIME)
+	@Inherited
+	@Target(ElementType.TYPE)
+	@Indexable
+	@interface Name {
+		String value();
+	}
+
+	@Retention(RetentionPolicy.RUNTIME)
+	@Inherited
+	@Target(ElementType.FIELD)
+	@interface Parameter {
+		String value() default "";
+		boolean optional() default false;
+	}
+
+	default String getType() {
+
+		final Name type = getClass().getAnnotation(Name.class);
+		return type == null ? null : type.value();
+
+	}
+}
+
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/AbstractShard.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/AbstractShard.java
new file mode 100644
index 00000000..4e2ac831
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/AbstractShard.java
@@ -0,0 +1,52 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+
+
+public abstract class AbstractShard<T> implements Shard<T> {
+
+	protected final DatasetAttributes datasetAttributes;
+
+	protected ShardIndex index;
+
+	private final long[] gridPosition;
+
+	public AbstractShard(final DatasetAttributes datasetAttributes, final long[] gridPosition,
+			final ShardIndex index) {
+
+		this.datasetAttributes = datasetAttributes;
+		this.gridPosition = gridPosition;
+		this.index = index;
+	}
+
+	@Override
+	public DatasetAttributes getDatasetAttributes() {
+
+		return datasetAttributes;
+	}
+
+	@Override
+	public int[] getSize() {
+
+		return getDatasetAttributes().getShardSize();
+	}
+
+	@Override
+	public int[] getBlockSize() {
+
+		return datasetAttributes.getBlockSize();
+	}
+
+	@Override
+	public long[] getGridPosition() {
+
+		return gridPosition;
+	}
+
+	@Override
+	public ShardIndex getIndex() {
+
+		return index;
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/InMemoryShard.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/InMemoryShard.java
new file mode 100644
index 00000000..c7274d85
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/InMemoryShard.java
@@ -0,0 +1,312 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.apache.commons.io.input.BoundedInputStream;
+import org.apache.commons.io.output.ByteArrayOutputStream;
+import org.apache.commons.io.output.CountingOutputStream;
+import org.apache.commons.io.output.ProxyOutputStream;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.DefaultBlockReader;
+import org.janelia.saalfeldlab.n5.DefaultBlockWriter;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.LockedChannel;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+import org.janelia.saalfeldlab.n5.util.Position;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
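+/**
+ * A {@link Shard} that keeps all of its {@link DataBlock}s in memory and can
+ * serialize them, together with the shard index, to a {@link KeyValueAccess}
+ * or an {@link OutputStream}.
+ */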
+public class InMemoryShard<T> extends AbstractShard<T> {
+
+	/* Maps each DataBlock's gridPosition (wrapped as a Position) to the block */
+	private final Map<Position, DataBlock<T>> blocks;
+	private ShardIndexBuilder indexBuilder;
+
+	/*
+	 * TODO:
+	 * Use morton- or c-ordering instead of writing blocks out in the order they're added?
+	 * (later)
+	 */
+	public InMemoryShard(final DatasetAttributes datasetAttributes, final long[] shardPosition) {
+
+		this( datasetAttributes, shardPosition, null);
+		indexBuilder = new ShardIndexBuilder(this);
+		final IndexLocation indexLocation = ((ShardingCodec)datasetAttributes.getArrayCodec()).getIndexLocation();
+		indexBuilder.indexLocation(indexLocation);
+	}
+
+	public InMemoryShard(final DatasetAttributes datasetAttributes, final long[] gridPosition,
+			ShardIndex index) {
+
+		super(datasetAttributes, gridPosition, index);
+		blocks = new TreeMap<>();
+	}
+
+	private void storeBlock(DataBlock<T> block) {
+
+		blocks.put(Position.wrap(block.getGridPosition()), block);
+	}
+
+	/**
+	 * Returns the {@link DataBlock} at the given block grid position.
+	 * <p>
+	 * The block grid position is relative to the image, not relative to this shard.
+	 */
+	@Override public DataBlock<T> getBlock(long... blockGridPosition) {
+
+		return blocks.get(Position.wrap(blockGridPosition));
+	}
+
+	@Override
+	public void writeBlock(DataBlock<T> block) {
+		
+		addBlock(block);
+	}
+
+	public void addBlock(DataBlock<T> block) {
+
+		storeBlock(block);
+	}
+
+	public int numBlocks() {
+
+		return blocks.size();
+	}
+	
+	@Override
+	public List<DataBlock<T>> getBlocks() {
+
+		return new ArrayList<>(blocks.values());
+	}
+
+	public List<DataBlock<T>> getBlocks( int[] blockIndexes ) {
+
+		final ArrayList<DataBlock<T>> out = new ArrayList<>();
+		final int[] blocksPerShard = getDatasetAttributes().getBlocksPerShard();
+
+		long[] position = new long[ getSize().length ];
+		for( int idx : blockIndexes ) {
+			GridIterator.indexToPosition(idx, blocksPerShard, position);
+			DataBlock<T> blk = getBlock(position);
+			if( blk != null )
+				out.add(blk);
+		}
+		return out;
+	}
+
+	protected IndexLocation indexLocation() {
+
+		if (index != null)
+			return index.getLocation();
+		else
+			return indexBuilder.getLocation();
+	}
+
+	@Override
+	public ShardIndex getIndex() {
+
+		if( index != null )
+			return index;
+		else
+			return indexBuilder.build();
+	}
+
+	public void write(final KeyValueAccess keyValueAccess, final String path) throws IOException {
+
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForWriting(path)) {
+			try (final OutputStream os = lockedChannel.newOutputStream()) {
+				write(os);
+			}
+		}
+	}
+
+	public void write(final OutputStream out) throws IOException {
+
+		if (indexLocation() == IndexLocation.END)
+			writeShardEndStream(out, this);
+		else
+			writeShardStart(out, this);
+	}
+
+	public static <T> InMemoryShard<T> readShard(
+			final KeyValueAccess kva, final String key, final long[] gridPosition, final DatasetAttributes attributes)
+			throws IOException {
+
+		try (final LockedChannel lockedChannel = kva.lockForReading(key)) {
+			try (final InputStream is = lockedChannel.newInputStream()) {
+				return readShard(is, gridPosition, attributes);
+			}
+		}
+
+		// Another possible implementation
+//		return fromShard(new VirtualShard<>(attributes, gridPosition, kva, key));
+	}
+
+	@SuppressWarnings("hiding")
+	public static <T> InMemoryShard<T> readShard(
+			final InputStream inputStream, final long[] gridPosition, final DatasetAttributes attributes) throws IOException {
+
+		try (ByteArrayOutputStream result = new ByteArrayOutputStream()) {
+			byte[] buffer = new byte[1024];
+			for (int length; (length = inputStream.read(buffer)) != -1;) {
+				result.write(buffer, 0, length);
+			}
+			return readShard(result.toByteArray(), gridPosition, attributes);
+		}
+	}
+
+	public static <T> InMemoryShard<T> readShard(
+			final byte[] data,
+			long[] shardPosition, final DatasetAttributes attributes) throws IOException {
+
+		final ShardIndex index = ((ShardingCodec)attributes.getArrayCodec()).createIndex(attributes);
+		ShardIndex.read(data, index);
+
+		final InMemoryShard<T> shard = new InMemoryShard<T>(attributes, shardPosition, index);
+		final GridIterator it = new GridIterator(attributes.getBlocksPerShard());
+		while (it.hasNext()) {
+
+			final long[] p = it.next();
+			final int[] pInt = GridIterator.long2int(p);
+
+			if (index.exists(pInt)) {
+
+				final ByteArrayInputStream is = new ByteArrayInputStream(data);
+				is.skip(index.getOffset(pInt));
+				BoundedInputStream bIs = BoundedInputStream.builder().setInputStream(is)
+						.setMaxCount(index.getNumBytes(pInt)).get();
+
+				final long[] blockGridPosition = attributes.getBlockPositionFromShardPosition(shardPosition, p);
+				@SuppressWarnings("unchecked")
+				final DataBlock<T> blk = (DataBlock<T>) DefaultBlockReader.readBlock(bIs, attributes,
+						blockGridPosition);
+				shard.addBlock(blk);
+				bIs.close();
+			}
+		}
+
+		return shard;
+	}
+
+	public static <T> void writeShard(final KeyValueAccess keyValueAccess, final String path, final InMemoryShard<T> shard) throws IOException {
+
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForWriting(path)) {
+			try (final OutputStream os = lockedChannel.newOutputStream()) {
+				writeShard(os, shard);
+			}
+		}
+	}
+
+	public static <T> void writeShard(final OutputStream out, final Shard<T> shard) throws IOException {
+
+		fromShard(shard).write(out);
+	}
+
+	public static <T> InMemoryShard<T> fromShard(Shard<T> shard) {
+
+		if (shard instanceof InMemoryShard)
+			return (InMemoryShard<T>) shard;
+
+		final InMemoryShard<T> inMemoryShard = new InMemoryShard<T>(
+				shard.getDatasetAttributes(),
+				shard.getGridPosition());
+
+		shard.forEach(blk -> inMemoryShard.addBlock(blk));
+		return inMemoryShard;
+	}
+
+	protected static <T> void writeShardEndStream(
+			final OutputStream out,
+			InMemoryShard<T> shard ) throws IOException {
+
+		final DatasetAttributes datasetAttributes = shard.getDatasetAttributes();
+
+		final ShardIndexBuilder indexBuilder = new ShardIndexBuilder(shard);
+		indexBuilder.indexLocation(IndexLocation.END);
+		final ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		indexBuilder.setCodecs(shardingCodec.getIndexCodecs());
+
+		// Necessary to stop `close()` when writing blocks from closing out base OutputStream
+		final ProxyOutputStream nop = new ProxyOutputStream(out) {
+			@Override public void close() {
+				//nop
+			}
+		};
+
+		final CountingOutputStream cout = new CountingOutputStream(nop);
+
+		long bytesWritten = 0;
+		for (DataBlock<T> block : shard.getBlocks()) {
+			DefaultBlockWriter.writeBlock(cout, datasetAttributes, block);
+			final long size = cout.getByteCount() - bytesWritten;
+			bytesWritten = cout.getByteCount();
+
+			indexBuilder.addBlock( block.getGridPosition(), size);
+		}
+
+		ShardIndex.write(indexBuilder.build(), out);
+	}
+
+	protected static <T> void writeShardEnd(
+			final OutputStream out,
+			InMemoryShard<T> shard ) throws IOException {
+
+		final ShardIndexBuilder indexBuilder = new ShardIndexBuilder(shard);
+		indexBuilder.indexLocation(IndexLocation.END);
+		final DatasetAttributes datasetAttributes = shard.getDatasetAttributes();
+		final ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		indexBuilder.setCodecs(shardingCodec.getIndexCodecs());
+
+		for (DataBlock<T> block : shard.getBlocks()) {
+			final ByteArrayOutputStream os = new ByteArrayOutputStream();
+			DefaultBlockWriter.writeBlock(os, datasetAttributes, block);
+
+			indexBuilder.addBlock(block.getGridPosition(), os.size());
+			out.write(os.toByteArray());
+		}
+
+		ShardIndex.write(indexBuilder.build(), out);
+	}
+
+	protected static <T> void writeShardStart(
+			final OutputStream out,
+			InMemoryShard<T> shard ) throws IOException {
+
+		final DatasetAttributes datasetAttributes = shard.getDatasetAttributes();
+		final ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+
+		final ShardIndexBuilder indexBuilder = new ShardIndexBuilder(shard);
+		indexBuilder.indexLocation(IndexLocation.START);
+		indexBuilder.setCodecs(shardingCodec.getIndexCodecs());
+
+		final List<byte[]> blockData = new ArrayList<>(shard.numBlocks());
+		for (DataBlock<T> block : shard.getBlocks()) {
+			final ByteArrayOutputStream os = new ByteArrayOutputStream();
+			DefaultBlockWriter.writeBlock(os, datasetAttributes, block);
+
+			blockData.add(os.toByteArray());
+			indexBuilder.addBlock(block.getGridPosition(), os.size());
+		}		
+
+		// let IOExceptions from the underlying stream propagate instead of
+		// swallowing them with printStackTrace
+		final ByteArrayOutputStream os = new ByteArrayOutputStream();
+		ShardIndex.write(indexBuilder.build(), os);
+		out.write(os.toByteArray());
+
+		for (byte[] data : blockData)
+			out.write(data);
+
+	}
+	
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/Shard.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/Shard.java
new file mode 100644
index 00000000..3f55dfbc
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/Shard.java
@@ -0,0 +1,182 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+
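+/**
+ * A shard: a grid of {@link DataBlock}s stored together in a single object and
+ * located through a {@link ShardIndex}.
+ */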
+public interface Shard<T> extends Iterable<DataBlock<T>> {
+
+	/**
+	 * Returns the number of blocks this shard contains along all dimensions.
+	 *
+	 * The size of a shard is expected to be smaller than or equal to the spacing of the shard grid.
+	 * The dimensionality of the size is expected to equal the dimensionality of the dataset. Consistency is not enforced.
+	 *
+	 * @return size of the shard in units of blocks
+	 */
+	default int[] getBlockGridSize() {
+
+		return getDatasetAttributes().getBlocksPerShard();
+	}
+
+	DatasetAttributes getDatasetAttributes();
+
+	/**
+	 * Returns the size of shards in pixel units.
+	 *
+	 * @return shard size
+	 */
+	default int[] getSize() {
+		return getDatasetAttributes().getShardSize();
+	}
+
+	/**
+	 * Returns the size of blocks in pixel units.
+	 *
+	 * @return block size
+	 */
+	default int[] getBlockSize() {
+		return getDatasetAttributes().getBlockSize();
+	}
+
+	/**
+	 * Returns the position of this shard on the shard grid.
+	 *
+	 * The dimensionality of the grid position is expected to be equal to the dimensionality of the dataset. Consistency is not enforced.
+	 *
+	 * @return position on the shard grid
+	 */
+	public long[] getGridPosition();
+
+	/**
+	 * Returns the position of the given block relative to this shard, or null if
+	 * this shard does not contain the given block.
+	 *
+	 * @param blockPosition the block position relative to the image
+	 * @return the block position within this shard
+	 */
+	default int[] getBlockPosition(long... blockPosition) {
+
+		final long[] shardPos = getDatasetAttributes().getShardPositionForBlock(blockPosition);
+		return getDatasetAttributes().getBlockPositionInShard(shardPos, blockPosition);
+	}
+	
+	/**
+	 * Returns the minimum position, in pixel units, of the shard at the given grid position.
+	 *
+	 * @param shardPosition the shard grid position
+	 * @return the shard's minimum pixel position
+	 */
+	default long[] getShardMinPosition(long... shardPosition) {
+
+		final int[] shardSize = getSize();
+		final long[] shardMin = new long[shardSize.length];
+		for (int i = 0; i < shardSize.length; i++) {
+			shardMin[i] = shardPosition[i] * shardSize[i];
+		}
+		return shardMin;
+	}
+
+	/**
+	 * Returns the position of the shard containing the block with the given block position.
+	 *
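+	 * For example, with a block grid size of {@code {16, 16}} per shard, the
+	 * block at position (17, 3) lies in the shard at position (1, 0).
+	 *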
+	 * @return the shard position
+	 */
+	default long[] getShardPosition(long... blockPosition) {
+
+		final int[] shardBlockDimensions = getBlockGridSize();
+		final long[] shardGridPosition = new long[shardBlockDimensions.length];
+		for (int i = 0; i < shardGridPosition.length; i++) {
+			shardGridPosition[i] = (long)Math.floor((double)(blockPosition[i]) / shardBlockDimensions[i]);
+		}
+
+		return shardGridPosition;
+	}
+
+	public DataBlock<T> getBlock(long... blockGridPosition);
+
+	public void writeBlock(DataBlock<T> block);
+
+	//TODO Caleb: add writeBlocks that does NOT always expect to overwrite the entire existing Shard
+
+	default Iterator<DataBlock<T>> iterator() {
+
+		return new DataBlockIterator<>(this);
+	}
+
+	default int getNumBlocks() {
+
+		return Arrays.stream(getBlockGridSize()).reduce(1, (x, y) -> x * y);
+	}
+
+	default List<DataBlock<T>> getBlocks() {
+
+		final List<DataBlock<T>> blocks = new ArrayList<>();
+		for (DataBlock<T> block : this) {
+			blocks.add(block);
+		}
+		return blocks;
+	}
+
+	/**
+	 * Returns an {@link Iterator} over block positions contained in this shard.
+	 * 
+	 * @return an iterator over the positions of the blocks in this shard
+	 */
+	default Iterator<long[]> blockPositionIterator() {
+
+		final int nd = getSize().length;
+		long[] min = getDatasetAttributes().getBlockPositionFromShardPosition( getGridPosition(), new long[nd]);
+		return new GridIterator(GridIterator.int2long(getBlockGridSize()), min);
+	}
+
+	ShardIndex getIndex();
+
+	static <T,A extends DatasetAttributes & ShardParameters> Shard<T> createEmpty(final A attributes, long... shardPosition) {
+
+		final long[] emptyIndex = new long[(int)(2 * attributes.getNumBlocks())];
+		Arrays.fill(emptyIndex, ShardIndex.EMPTY_INDEX_NBYTES);
+		final ShardIndex shardIndex = new ShardIndex(attributes.getBlocksPerShard(), emptyIndex, ShardingCodec.IndexLocation.END);
+		return new InMemoryShard<T>(attributes, shardPosition, shardIndex);
+	}
+
+	class DataBlockIterator<T> implements Iterator<DataBlock<T>> {
+
+		private final GridIterator it;
+		private final Shard<T> shard;
+		private final ShardIndex index;
+		// TODO ShardParameters is deprecated?
+		private final ShardParameters attributes;
+		private int blockIndex = 0;
+
+		public DataBlockIterator(final Shard<T> shard) {
+
+			this.shard = shard;
+			this.index = shard.getIndex();
+			this.attributes = shard.getDatasetAttributes();
+			this.blockIndex = 0;
+			it = new GridIterator(shard.getBlockGridSize());
+		}
+
+		@Override
+		public boolean hasNext() {
+
+			for (int i = blockIndex; i < attributes.getNumBlocks(); i++) {
+				if (index.exists(i))
+					return true;
+			}
+			return false;
+		}
+
+		@Override
+		public DataBlock<T> next() {
+			while (!index.exists(blockIndex++))
+				it.fwd();
+
+			return shard.getBlock(it.next());
+		}
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardException.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardException.java
new file mode 100644
index 00000000..d208c62e
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardException.java
@@ -0,0 +1,14 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.janelia.saalfeldlab.n5.N5Exception;
+
+public class ShardException extends N5Exception {
+
+	private static final long serialVersionUID = -77907634621557855L;
+
+	public static class IndexException extends ShardException {
+
+		private static final long serialVersionUID = 3924426352575114063L;
+
+	}
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndex.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndex.java
new file mode 100644
index 00000000..655bf62c
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndex.java
@@ -0,0 +1,323 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.apache.commons.io.input.BoundedInputStream;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.DefaultBlockReader;
+import org.janelia.saalfeldlab.n5.DefaultBlockWriter;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.LockedChannel;
+import org.janelia.saalfeldlab.n5.LongArrayDataBlock;
+import org.janelia.saalfeldlab.n5.N5Exception;
+import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.UncheckedIOException;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.stream.IntStream;
+
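+/**
+ * The index of a shard: for each block in the shard's block grid it stores two
+ * {@code uint64} values, the block's byte offset within the shard object and
+ * its length in bytes. An entry whose two values are both
+ * {@code 0xFFFFFFFFFFFFFFFF} marks a block that is not present.
+ */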
+public class ShardIndex extends LongArrayDataBlock {
+
+	public static final long EMPTY_INDEX_NBYTES = 0xFFFFFFFFFFFFFFFFL;
+	private static final int BYTES_PER_LONG = 8;
+	private static final int LONGS_PER_BLOCK = 2;
+	private static final long[] DUMMY_GRID_POSITION = null;
+
+	private final IndexLocation location;
+
+	private final DeterministicSizeCodec[] codecs;
+
+	public ShardIndex(int[] shardBlockGridSize, long[] data, IndexLocation location, final DeterministicSizeCodec... codecs) {
+
+		super(prepend(LONGS_PER_BLOCK, shardBlockGridSize), DUMMY_GRID_POSITION, data);
+		this.codecs = codecs;
+		this.location = location;
+	}
+
+	public ShardIndex(int[] shardBlockGridSize, IndexLocation location, DeterministicSizeCodec... codecs) {
+
+		this(shardBlockGridSize, emptyIndexData(shardBlockGridSize), location, codecs);
+	}
+
+	public ShardIndex(int[] shardBlockGridSize, DeterministicSizeCodec... codecs) {
+
+		this(shardBlockGridSize, emptyIndexData(shardBlockGridSize), IndexLocation.END, codecs);
+	}
+
+	public boolean exists(int[] gridPosition) {
+
+		return getOffset(gridPosition) != EMPTY_INDEX_NBYTES ||
+				getNumBytes(gridPosition) != EMPTY_INDEX_NBYTES;
+	}
+
+	public boolean exists(int blockNum) {
+
+		return data[blockNum * 2] != EMPTY_INDEX_NBYTES ||
+				data[blockNum * 2 + 1] != EMPTY_INDEX_NBYTES;
+	}
+
+	public int getNumBlocks() {
+
+		return Arrays.stream(getSize()).reduce(1, (x, y) -> x * y);
+	}
+
+	public boolean isEmpty() {
+
+		return IntStream.range(0, getNumBlocks()).noneMatch(this::exists);
+	}
+
+	public IndexLocation getLocation() {
+
+		return location;
+	}
+
+	public long getOffset(int... gridPosition) {
+
+		return data[getOffsetIndex(gridPosition)];
+	}
+
+	public long getOffsetByBlockIndex(int index) {
+		return data[index * 2];
+	}
+
+	public long getNumBytes(int... gridPosition) {
+
+		return data[getNumBytesIndex(gridPosition)];
+	}
+
+	public long getNumBytesByBlockIndex(int index) {
+
+		return data[index * 2 + 1];
+	}
+
+	public void set(long offset, long nbytes, int[] gridPosition) {
+
+		final int i = getOffsetIndex(gridPosition);
+		data[i] = offset;
+		data[i + 1] = nbytes;
+	}
+
+	protected int getOffsetIndex(int... gridPosition) {
+
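+		// this.size is [LONGS_PER_BLOCK, blocksPerShard...], so in the loop below
+		// size[i] is the shard's block-grid size along dimension i - 1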
+		int idx = gridPosition[0];
+		int cumulativeSize = 1;
+		for (int i = 1; i < gridPosition.length; i++) {
+			cumulativeSize *= size[i];
+			idx += gridPosition[i] * cumulativeSize;
+		}
+		return idx * 2;
+	}
+
+	protected int getNumBytesIndex(int... gridPosition) {
+
+		return getOffsetIndex(gridPosition) + 1;
+	}
+
+	public long numBytes() {
+
+		final int numEntries = Arrays.stream(getSize()).reduce(1, (x, y) -> x * y);
+		final int numBytesFromBlocks = numEntries * BYTES_PER_LONG;
+		long totalNumBytes = numBytesFromBlocks;
+		// codecs is DeterministicSizeCodec[], so every codec has a deterministic encoded size
+		for (final DeterministicSizeCodec codec : codecs)
+			totalNumBytes = codec.encodedSize(totalNumBytes);
+		return totalNumBytes;
+	}
+
+	public static boolean read(byte[] data, final ShardIndex index) {
+
+		final IndexByteBounds byteBounds = byteBounds(index, data.length);
+		final ByteArrayInputStream is = new ByteArrayInputStream(data);
+		is.skip(byteBounds.start);
+		try {
+			final BoundedInputStream bIs = BoundedInputStream.builder()
+					.setInputStream(is)
+					.setMaxCount(index.numBytes()).get();
+
+			read(bIs, index);
+			return true;
+		} catch (IOException e) {
+			return false;
+		}
+	}
+
+	public static void read(InputStream in, final ShardIndex index) throws IOException {
+
+		@SuppressWarnings("unchecked")
+		final DataBlock<long[]> indexBlock = (DataBlock<long[]>) DefaultBlockReader.readBlock(in,
+				index.getIndexAttributes(), index.gridPosition);
+		final long[] indexData = indexBlock.getData();
+		System.arraycopy(indexData, 0, index.data, 0, index.data.length);
+	}
+
+	public static boolean read(
+			final KeyValueAccess keyValueAccess,
+			final String key,
+			final ShardIndex index
+	) {
+
+		try {
+			final IndexByteBounds byteBounds = byteBounds(index, keyValueAccess.size(key));
+			try (final LockedChannel lockedChannel = keyValueAccess.lockForReading(key, byteBounds.start, byteBounds.end)) {
+				try (final InputStream in = lockedChannel.newInputStream()) {
+					read(in, index);
+					return true;
+				}
+			} catch (final IOException | UncheckedIOException e) {
+				throw new N5IOException("Failed to read shard index from " + key, e);
+			}
+		} catch (final IOException | N5Exception.N5NoSuchKeyException e) {
+			return false;
+		}
+	}
+
+	public static void write(
+			final ShardIndex index,
+			final KeyValueAccess keyValueAccess,
+			final String key
+	) throws IOException {
+
+		final long start = index.location == IndexLocation.START ? 0 : sizeOrZero( keyValueAccess, key) ;
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForWriting(key, start, index.numBytes())) {
+			try (final OutputStream os = lockedChannel.newOutputStream()) {
+				write(index, os);
+			}
+		} catch (final IOException | UncheckedIOException e) {
+			throw new N5IOException("Failed to write shard index to " + key, e);
+		}
+	}
+
+	private static long sizeOrZero(final KeyValueAccess keyValueAccess, final String key) {
+		try {
+			return keyValueAccess.size(key);
+		} catch (Exception e) {
+			return 0;
+		}
+	}
+
+	public static void write(final ShardIndex index, OutputStream out) throws IOException {
+
+		DefaultBlockWriter.writeBlock(out, index.getIndexAttributes(), index);
+	}
+
+	private DatasetAttributes getIndexAttributes() {
+
+		final DatasetAttributes indexAttributes =
+				new DatasetAttributes(
+						Arrays.stream(getSize()).mapToLong(it -> it).toArray(),
+						getSize(),
+						DataType.UINT64,
+						codecs
+				);
+		return indexAttributes;
+	}
+
+	public static IndexByteBounds byteBounds(DatasetAttributes datasetAttributes, final long objectSize) {
+
+		ShardingCodec shardCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		final ShardIndex index = shardCodec.createIndex(datasetAttributes);
+
+		final long indexSize = index.numBytes();
+		return byteBounds(indexSize, index.location, objectSize);
+	}
+
+	public static IndexByteBounds byteBounds(final ShardIndex index, long objectSize) {
+		return byteBounds(index.numBytes(), index.location, objectSize);
+	}
+
+	public static IndexByteBounds byteBounds(final long indexSize, final IndexLocation indexLocation, final long objectSize) {
+
+		if (indexLocation == IndexLocation.START) {
+			return new IndexByteBounds(0L, indexSize);
+		} else {
+			return new IndexByteBounds(objectSize - indexSize, objectSize - 1);
+		}
+	}
+
+	public static class IndexByteBounds {
+
+		public final long start;
+		public final long end;
+		public final long size;
+
+		public IndexByteBounds(long start, long end) {
+
+			this.start = start;
+			this.end = end;
+			this.size = end - start + 1;
+		}
+	}
+
+	//TODO Caleb: Probably don't need to keep this eventually
+	public static ShardIndex read(FileChannel channel, DatasetAttributes datasetAttributes) throws IOException {
+
+		// TODO need codecs
+		// TODO FileChannel is too specific - generalize
+		ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		final int[] indexShape = prepend(2, datasetAttributes.getBlocksPerShard());
+		final int indexSize = (int)Arrays.stream(indexShape).reduce(1, (x, y) -> x * y);
+		final int indexBytes = BYTES_PER_LONG * indexSize;
+
+		if (shardingCodec.getIndexLocation() == IndexLocation.END) {
+			channel.position(channel.size() - indexBytes);
+		}
+
+		final InputStream is = Channels.newInputStream(channel);
+		final DataInputStream dis = new DataInputStream(is);
+
+		final long[] indexes = new long[indexSize];
+		for (int i = 0; i < indexSize; i++) {
+			indexes[i] = dis.readLong();
+		}
+
+		// pass the raw block grid size: the ShardIndex constructor prepends the
+		// per-block entry count itself, and the location comes from the codec
+		return new ShardIndex(datasetAttributes.getBlocksPerShard(), indexes, shardingCodec.getIndexLocation());
+	}
+
+	private static long[] emptyIndexData(final int[] size) {
+
+		final int N = 2 * Arrays.stream(size).reduce(1, (x, y) -> x * y);
+		final long[] data = new long[N];
+		Arrays.fill(data, EMPTY_INDEX_NBYTES);
+		return data;
+	}
+
+	private static int[] prepend(final int value, final int[] array) {
+
+		final int[] indexBlockSize = new int[array.length + 1];
+		indexBlockSize[0] = value;
+		System.arraycopy(array, 0, indexBlockSize, 1, array.length);
+		return indexBlockSize;
+	}
+
+	@Override
+	public boolean equals(Object other) {
+
+		if (!(other instanceof ShardIndex))
+			return false;
+
+		final ShardIndex index = (ShardIndex) other;
+		if (this.location != index.location)
+			return false;
+
+		if (!Arrays.equals(this.size, index.size))
+			return false;
+
+		return Arrays.equals(this.data, index.data);
+	}
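+
+	// keep hashCode consistent with the fields compared in equals
+	@Override
+	public int hashCode() {
+
+		int result = location == null ? 0 : location.hashCode();
+		result = 31 * result + Arrays.hashCode(size);
+		result = 31 * result + Arrays.hashCode(data);
+		return result;
+	}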
+}
+
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndexBuilder.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndexBuilder.java
new file mode 100644
index 00000000..41d505af
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardIndexBuilder.java
@@ -0,0 +1,86 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import java.util.Arrays;
+
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+
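+/**
+ * Accumulates per-block (offset, length) entries for a shard's {@link ShardIndex}.
+ * A minimal usage sketch, assuming blocks are appended in write order and that
+ * {@code shard}, {@code blockPosition}, and {@code numBytesWritten} are
+ * provided by the caller:
+ *
+ * <pre>{@code
+ * ShardIndexBuilder builder = new ShardIndexBuilder(shard)
+ * 		.indexLocation(IndexLocation.END)
+ * 		.setCodecs(new Crc32cChecksumCodec());
+ * builder.addBlock(blockPosition, numBytesWritten);
+ * ShardIndex index = builder.build();
+ * }</pre>
+ */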
+public class ShardIndexBuilder {
+
+	private final Shard<?> shard;
+
+	private ShardIndex temporaryIndex;
+
+	private IndexLocation location = IndexLocation.END;
+
+	private DeterministicSizeCodec[] codecs;
+	
+	private long currentOffset = 0;
+
+	public ShardIndexBuilder(Shard<?> shard) {
+
+		this.shard = shard;
+		this.temporaryIndex = new ShardIndex(shard.getBlockGridSize(), location);
+	}
+
+	public ShardIndex build() {
+
+		return new ShardIndex(
+				shard.getBlockGridSize(),
+				temporaryIndex.getData(),
+				location,
+				codecs);
+	}
+
+	public ShardIndexBuilder indexLocation(IndexLocation location) {
+
+		this.location = location;
+		this.temporaryIndex = new ShardIndex(shard.getBlockGridSize(), location);
+		updateInitialOffset();
+		return this;
+	}
+
+	public IndexLocation getLocation() {
+
+		return this.location;
+	}
+
+	public ShardIndexBuilder setCodecs(DeterministicSizeCodec... codecs) {
+
+		this.codecs = codecs;
+		final ShardIndex newIndex = new ShardIndex(shard.getBlockGridSize(), temporaryIndex.getLocation(), codecs);
+		this.temporaryIndex = newIndex;
+		updateInitialOffset();
+		return this;
+	}
+
+	public ShardIndexBuilder addBlock(long[] blockPosition, long numBytes) {
+		//TODO Caleb: Maybe move to ShardIndex?
+		final int[] blockPositionInShard = shard.getDatasetAttributes().getBlockPositionInShard(
+				shard.getGridPosition(),
+				blockPosition);
+
+		if (blockPositionInShard == null) {
+			throw new IllegalArgumentException(String.format(
+					"The block at position %s is not contained in the shard at position : %s and size : %s )",
+					Arrays.toString(blockPosition),
+					Arrays.toString(shard.getGridPosition()),
+					Arrays.toString(shard.getSize())));
+		}
+
+		temporaryIndex.set(currentOffset, numBytes, blockPositionInShard);
+		currentOffset += numBytes;
+
+		return this;
+	}
+
+	private void updateInitialOffset() {
+
+		if (location == IndexLocation.END)
+			currentOffset = 0;
+		else
+			currentOffset = temporaryIndex.numBytes();
+
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardParameters.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardParameters.java
new file mode 100644
index 00000000..1791c944
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardParameters.java
@@ -0,0 +1,227 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Spliterator;
+import java.util.Spliterators;
+import java.util.TreeMap;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import org.janelia.saalfeldlab.n5.BlockParameters;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+import org.janelia.saalfeldlab.n5.util.Position;
+
+import javax.annotation.CheckForNull;
+
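+/**
+ * Grid math shared by sharded dataset attributes: conversions between block
+ * positions, shard positions, and pixel coordinates.
+ */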
+@Deprecated
+public interface ShardParameters extends BlockParameters {
+
+
+	/**
+	 * The size of a shard in pixel units.
+	 *
+	 * @return the number of pixels per dimension for a shard
+	 */
+	@CheckForNull
+	int[] getShardSize();
+
+
+	/**
+	 * Returns the number of blocks per dimension for a shard.
+	 *
+	 * @return the size of the block grid of a shard
+	 */
+	default int[] getBlocksPerShard() {
+
+		final int[] shardSize = getShardSize();
+		Objects.requireNonNull(shardSize, "getShardSize() must not be null");
+		final int nd = getNumDimensions();
+		final int[] blocksPerShard = new int[nd];
+		final int[] blockSize = getBlockSize();
+		for (int i = 0; i < nd; i++)
+			blocksPerShard[i] = shardSize[i] / blockSize[i];
+
+		return blocksPerShard;
+	}
+	
+	/**
+	 * Returns the number of blocks per dimension that tile the image.
+	 *
+	 * @return blocks per image
+	 */
+	default long[] blocksPerImage() {
+		return IntStream.range(0, getNumDimensions())
+				.mapToLong(i -> (long)Math.ceil((double)getDimensions()[i] / getBlockSize()[i]))
+				.toArray();
+	}
+
+	/**
+	 * Returns the number of shards per dimension that tile the image.
+	 *
+	 * @return shards per image
+	 */
+	default long[] shardsPerImage() {
+		return IntStream.range(0, getNumDimensions())
+				.mapToLong(i -> (long)Math.ceil((double)getDimensions()[i] / getShardSize()[i]))
+				.toArray();
+	}
+
+	/**
+	 * Given a block's position relative to the array, returns the position of the shard containing that block relative to the shard grid.
+	 *
+	 * @param blockGridPosition
+	 *            position of a block relative to the array
+	 * @return the position of the containing shard in the shard grid
+	 */
+	default long[] getShardPositionForBlock(final long... blockGridPosition) {
+
+		final int[] blocksPerShard = getBlocksPerShard();
+		final long[] shardGridPosition = new long[blockGridPosition.length];
+		for (int i = 0; i < shardGridPosition.length; i++) {
+			shardGridPosition[i] = (long)Math.floor((double)blockGridPosition[i] / blocksPerShard[i]);
+		}
+
+		return shardGridPosition;
+	}
+	
+	/**
+	 * Returns the number of blocks per dimension needed to tile the image.
+	 *
+	 * @return the size of the image's block grid
+	 */
+	default int[] getShardBlockGridSize() {
+
+		final int nd = getNumDimensions();
+		final int[] shardBlockGridSize = new int[nd];
+		final int[] blockSize = getBlockSize();
+		for (int i = 0; i < nd; i++)
+			shardBlockGridSize[i] = (int)(Math.ceil((double)getDimensions()[i] / blockSize[i]));
+
+		return shardBlockGridSize;
+	}
+
+	/**
+	 * Returns the position of the given block relative to the given shard, or null
+	 * if the block is not contained in that shard.
+	 *
+	 * @param shardPosition the shard position in the shard grid
+	 * @param blockPosition the block position relative to the image
+	 * @return the block position within the shard
+	 */
+	default int[] getBlockPositionInShard(final long[] shardPosition, final long[] blockPosition) {
+
+		// TODO check correctness 
+		final long[] shardPos = getShardPositionForBlock(blockPosition);
+		if (!Arrays.equals(shardPosition, shardPos))
+			return null;
+
+		final int[] shardSize = getBlocksPerShard();
+		final int[] blockShardPos = new int[shardSize.length];
+		for (int i = 0; i < shardSize.length; i++) {
+			blockShardPos[i] = (int)(blockPosition[i] % shardSize[i]);
+		}
+
+		return blockShardPos;
+	}
+
+	/**
+	 * Given a block's position relative to a shard, returns its position in pixels
+	 * relative to the image.
+	 * 
+	 * @param shardPosition shard position in the shard grid
+	 * @param blockPosition the block position relative to the shard
+	 * @return the block's min pixel coordinate
+	 */
+	default long[] getBlockMinFromShardPosition(final long[] shardPosition, final long[] blockPosition) {
+
+		// is this useful?
+		final int[] blockSize = getBlockSize();
+		final int[] shardSize = getShardSize();
+		Objects.requireNonNull(shardSize, "getShardSize() must not be null");
+		final long[] blockImagePos = new long[shardSize.length];
+		for (int i = 0; i < shardSize.length; i++) {
+			blockImagePos[i] = (shardPosition[i] * shardSize[i]) + (blockPosition[i] * blockSize[i]);
+		}
+
+		return blockImagePos;
+	}
+
+	/**
+	 * Given a block's position relative to a shard, returns its position relative
+	 * to the image.
+	 *
+	 * @param shardPosition shard position in the shard grid
+	 * @param blockPosition block position relative to the shard 
+	 * @return the block position in the block grid
+	 */
+	default long[] getBlockPositionFromShardPosition(final long[] shardPosition, final long[] blockPosition) {
+
+		// is this useful?
+		final int[] shardBlockSize = getBlocksPerShard();
+		final long[] blockImagePos = new long[getNumDimensions()];
+		for (int i = 0; i < getNumDimensions(); i++) {
+			blockImagePos[i] = (shardPosition[i] * shardBlockSize[i]) + (blockPosition[i]);
+		}
+
+		return blockImagePos;
+	}
+
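+	/**
+	 * Groups the given block positions by the shard that contains them.
+	 *
+	 * @param blockPositions block positions in the block grid
+	 * @return a map from shard position to the block positions it contains
+	 */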
+	default Map<Position, List<long[]>> groupBlockPositions(final List<long[]> blockPositions) {
+
+		final TreeMap<Position, List<long[]>> map = new TreeMap<>();
+		for (final long[] blockPos : blockPositions) {
+			final Position shardPos = Position.wrap(getShardPositionForBlock(blockPos));
+			map.computeIfAbsent(shardPos, k -> new ArrayList<>()).add(blockPos);
+		}
+
+		return map;
+	}
+	
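+	/**
+	 * Groups the given blocks by the shard that contains them.
+	 *
+	 * @param blocks the data blocks
+	 * @return a map from shard position to the blocks it contains
+	 */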
+	default <T> Map<Position, List<DataBlock<T>>> groupBlocks(final List<DataBlock<T>> blocks) {
+
+		// TODO figure out how to re-use groupBlockPositions here?
+		final TreeMap<Position, List<DataBlock<T>>> map = new TreeMap<>();
+		for (final DataBlock<T> block : blocks) {
+			final Position shardPos = Position.wrap(getShardPositionForBlock(block.getGridPosition()));
+			map.computeIfAbsent(shardPos, k -> new ArrayList<>()).add(block);
+		}
+
+		return map;
+	}
+
+	/**
+	 * @return the total number of blocks in a shard
+	 */
+	default long getNumBlocks() {
+
+		return Arrays.stream(getBlocksPerShard()).asLongStream().reduce(1L, (x, y) -> x * y);
+	}
+
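+	/**
+	 * Returns a stream over all block positions of the dataset, iterating shard
+	 * by shard so that blocks belonging to the same shard are visited together.
+	 */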
+	default Stream<long[]> blockPositions() {
+
+		final int[] blocksPerShard = getBlocksPerShard();
+		return toStream( new GridIterator(shardsPerImage()))
+				.flatMap( shardPosition -> {
+					final int nd = getNumDimensions();
+					final long[] min = getBlockPositionFromShardPosition(shardPosition, new long[nd]);
+					return toStream(new GridIterator(GridIterator.int2long(blocksPerShard), min));
+				});
+	}
+
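+	/**
+	 * Wraps the given iterator as a sequential {@link Stream}.
+	 */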
+	static <T> Stream<T> toStream(final Iterator<T> it) {
+
+		return StreamSupport.stream(Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardingCodec.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardingCodec.java
new file mode 100644
index 00000000..4da31eff
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/ShardingCodec.java
@@ -0,0 +1,163 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonPrimitive;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.serialization.N5Annotations;
+import org.janelia.saalfeldlab.n5.serialization.NameConfig;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.reflect.Type;
+
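+/**
+ * The {@code sharding_indexed} codec: aggregates multiple blocks into a single
+ * shard object together with an index that records the byte offset and length
+ * of each block, so that individual blocks can be read and written without
+ * fetching the whole shard. The index may be stored at the {@code START} or
+ * {@code END} of the shard.
+ */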
+@NameConfig.Name(ShardingCodec.TYPE)
+public class ShardingCodec implements Codec.ArrayCodec {
+
+	private static final long serialVersionUID = -5879797314954717810L;
+
+	public static final String TYPE = "sharding_indexed";
+
+	public static final String CHUNK_SHAPE_KEY = "chunk_shape";
+	public static final String INDEX_LOCATION_KEY = "index_location";
+	public static final String CODECS_KEY = "codecs";
+	public static final String INDEX_CODECS_KEY = "index_codecs";
+
+	public enum IndexLocation {
+		START, END;
+	}
+
+	@N5Annotations.ReverseArray // TODO need to reverse for zarr, not for n5
+	@NameConfig.Parameter(CHUNK_SHAPE_KEY)
+	private final int[] blockSize;
+
+	@NameConfig.Parameter(CODECS_KEY)
+	private final Codec[] codecs;
+
+	@NameConfig.Parameter(INDEX_CODECS_KEY)
+	private final DeterministicSizeCodec[] indexCodecs;
+
+	@NameConfig.Parameter(value = INDEX_LOCATION_KEY, optional = true)
+	private final IndexLocation indexLocation;
+
+	@SuppressWarnings("unused")
+	private ShardingCodec() {
+
+		blockSize = null;
+		codecs = null;
+		indexCodecs = null;
+		indexLocation = IndexLocation.END;
+	}
+
+	public ShardingCodec(
+			final int[] blockSize,
+			final Codec[] codecs,
+			final DeterministicSizeCodec[] indexCodecs,
+			final IndexLocation indexLocation) {
+
+		this.blockSize = blockSize;
+		this.codecs = codecs;
+		this.indexCodecs = indexCodecs;
+		this.indexLocation = indexLocation;
+	}
+
+	public int[] getBlockSize() {
+
+		return blockSize;
+	}
+
+	public IndexLocation getIndexLocation() {
+
+		return indexLocation;
+	}
+
+	public ArrayCodec getArrayCodec() {
+
+		return (Codec.ArrayCodec)codecs[0];
+	}
+
+	public BytesCodec[] getCodecs() {
+
+		if (codecs.length == 1)
+			return new BytesCodec[]{};
+
+		final BytesCodec[] bytesCodecs = new BytesCodec[codecs.length - 1];
+		System.arraycopy(codecs, 1, bytesCodecs, 0, bytesCodecs.length);
+		return bytesCodecs;
+	}
+
+	public DeterministicSizeCodec[] getIndexCodecs() {
+
+		return indexCodecs;
+	}
+
+	@Override public long[] getPositionForBlock(DatasetAttributes attributes, DataBlock<?> dataBlock) {
+
+		final long[] blockPosition = dataBlock.getGridPosition();
+		return attributes.getShardPositionForBlock(blockPosition);
+	}
+
+	@Override public long[] getPositionForBlock(DatasetAttributes attributes, final long... blockPosition) {
+
+		return attributes.getShardPositionForBlock(blockPosition);
+	}
+
+	@Override public DataBlockInputStream decode(DatasetAttributes attributes, long[] gridPosition, InputStream in) throws IOException {
+
+		return getArrayCodec().decode(attributes, gridPosition, in);
+	}
+
+	@Override public DataBlockOutputStream encode(DatasetAttributes attributes, DataBlock<?> dataBlock, OutputStream out) throws IOException {
+
+		return getArrayCodec().encode(attributes, dataBlock, out);
+	}
+
+	@Override public <T> void writeBlock(KeyValueAccess kva, String keyPath, DatasetAttributes datasetAttributes, DataBlock<T> dataBlock) {
+
+		final long[] shardPos = datasetAttributes.getShardPositionForBlock(dataBlock.getGridPosition());
+		new VirtualShard<T>(datasetAttributes, shardPos, kva, keyPath).writeBlock(dataBlock);
+	}
+
+	@Override public <T> DataBlock<T> readBlock(final KeyValueAccess kva, final String keyPath, final DatasetAttributes datasetAttributes, final long... gridPosition) {
+
+		final long[] shardPosition = datasetAttributes.getShardPositionForBlock(gridPosition);
+		return new VirtualShard<T>(datasetAttributes, shardPosition, kva, keyPath).getBlock(gridPosition);
+	}
+
+	public ShardIndex createIndex(final DatasetAttributes attributes) {
+		return new ShardIndex(attributes.getBlocksPerShard(), getIndexLocation(), getIndexCodecs());
+	}
+
+	@Override
+	public String getType() {
+
+		return TYPE;
+	}
+
+	public static final IndexLocationAdapter indexLocationAdapter = new IndexLocationAdapter();
+
+	public static class IndexLocationAdapter implements JsonSerializer<IndexLocation>, JsonDeserializer<IndexLocation> {
+
+		@Override public IndexLocation deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
+
+			if (!json.isJsonPrimitive())
+				return null;
+
+			return IndexLocation.valueOf(json.getAsString().toUpperCase());
+		}
+
+		@Override public JsonElement serialize(IndexLocation src, Type typeOfSrc, JsonSerializationContext context) {
+
+			return new JsonPrimitive(src.name().toLowerCase());
+		}
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/shard/VirtualShard.java b/src/main/java/org/janelia/saalfeldlab/n5/shard/VirtualShard.java
new file mode 100644
index 00000000..3b115a90
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/shard/VirtualShard.java
@@ -0,0 +1,265 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.BoundedInputStream;
+import org.apache.commons.io.input.ProxyInputStream;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.LockedChannel;
+import org.janelia.saalfeldlab.n5.N5Exception;
+import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+
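+/**
+ * A shard that does not hold its blocks in memory but reads and writes them on
+ * demand through a {@link KeyValueAccess}, using the shard index to locate
+ * individual blocks within the backing shard object.
+ */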
+public class VirtualShard<T> extends AbstractShard<T> {
+
+	final private KeyValueAccess keyValueAccess;
+	final private String path;
+
+	public VirtualShard(final DatasetAttributes datasetAttributes, long[] gridPosition,
+			final KeyValueAccess keyValueAccess, final String path) {
+
+		super(datasetAttributes, gridPosition, null);
+		this.keyValueAccess = keyValueAccess;
+		this.path = path;
+	}
+
+	public VirtualShard(final DatasetAttributes datasetAttributes, long[] gridPosition) {
+
+		this(datasetAttributes, gridPosition, null, null);
+	}
+
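+	/**
+	 * Reads and decodes a single block from the given stream, which is expected
+	 * to be positioned at the first byte of the block's encoded data.
+	 */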
+	@SuppressWarnings("unchecked")
+	public DataBlock<T> getBlock(InputStream in, long... blockGridPosition) throws IOException {
+
+		ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		final Codec.BytesCodec[] codecs = shardingCodec.getCodecs();
+		final Codec.ArrayCodec arrayCodec = shardingCodec.getArrayCodec();
+
+		final ProxyInputStream proxyIn = new ProxyInputStream(in) {
+			@Override
+			public void close() {
+				//nop
+			}
+		};
+		final Codec.DataBlockInputStream dataBlockStream = arrayCodec.decode(datasetAttributes, blockGridPosition, proxyIn);
+
+		final InputStream stream = Codec.decode(in, codecs);
+		final DataBlock<T> dataBlock = dataBlockStream.allocateDataBlock();
+		dataBlock.readData(dataBlockStream.getDataInput(stream));
+		stream.close();
+
+		return dataBlock;
+	}
+
+	@Override
+	public List<DataBlock<T>> getBlocks()  {
+		return getBlocks(IntStream.range(0, getNumBlocks()).toArray());
+	}
+
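+	/**
+	 * Reads the blocks at the given linear indexes within this shard. Blocks are
+	 * read in order of increasing byte offset so the backing stream is only ever
+	 * traversed forward. Missing blocks are skipped, so the returned list may
+	 * contain fewer entries than requested, but never nulls.
+	 */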
+	public List<DataBlock<T>> getBlocks(final int[] blockIndexes) {
+
+		// will not contain nulls
+		final ShardIndex index = getIndex();
+		final ArrayList<DataBlock<T>> blocks = new ArrayList<>();
+
+		if (index.isEmpty())
+			return blocks;
+
+		// sort index offsets
+		// and keep track of relevant positions
+		final long[] indexData = index.getData();
+		List<long[]> sortedOffsets = Arrays.stream(blockIndexes)
+				.mapToObj(i -> new long[]{indexData[i * 2], i})
+				.filter(x -> x[0] != ShardIndex.EMPTY_INDEX_NBYTES)
+				.sorted(Comparator.comparingLong(a -> ((long[])a)[0]))
+				.collect(Collectors.toList());
+
+		final int nd = getDatasetAttributes().getNumDimensions();
+		long[] position = new long[nd];
+
+		final int[] blocksPerShard = getDatasetAttributes().getBlocksPerShard();
+		final long[] blockGridMin = IntStream.range(0, nd)
+				.mapToLong(i -> blocksPerShard[i] * getGridPosition()[i])
+				.toArray();
+
+		long streamPosition = 0;
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForReading(path)) {
+			try (final InputStream channelIn = lockedChannel.newInputStream()) {
+
+				for (long[] offsetIndex : sortedOffsets) {
+
+					final long offset = offsetIndex[0];
+					if (offset < 0)
+						continue;
+
+					final long idx = offsetIndex[1];
+					GridIterator.indexToPosition(idx, blocksPerShard, blockGridMin, position);
+
+					IOUtils.skipFully(channelIn, offset - streamPosition);
+					final long numBytes = index.getNumBytesByBlockIndex((int) idx);
+					final BoundedInputStream bIs = BoundedInputStream.builder().setInputStream(channelIn)
+							.setMaxCount(numBytes).get();
+
+					blocks.add(getBlock(bIs, position.clone()));
+					streamPosition = offset + numBytes;
+				}
+			}
+		} catch (final N5Exception.N5NoSuchKeyException e) {
+			return blocks;
+		} catch (final IOException | UncheckedIOException e) {
+			throw new N5IOException("Failed to read block from " + path, e);
+		}
+
+		return blocks;
+	}
+
+	@Override
+	public DataBlock<T> getBlock(long... blockGridPosition) {
+
+		final int[] relativePosition = getBlockPosition(blockGridPosition);
+		if (relativePosition == null)
+			throw new N5IOException("Attempted to read a block from the wrong shard.");
+
+		final ShardIndex idx = getIndex();
+		if (!idx.exists(relativePosition))
+			return null;
+
+		final long blockOffset = idx.getOffset(relativePosition);
+		final long blockSize = idx.getNumBytes(relativePosition);
+
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForReading(path, blockOffset, blockSize)) {
+			try ( final InputStream in = lockedChannel.newInputStream()) {
+				final long[] blockPosInImg = getDatasetAttributes().getBlockPositionFromShardPosition(getGridPosition(), blockGridPosition);
+				return getBlock( in, blockPosInImg );
+			}
+		} catch (final N5Exception.N5NoSuchKeyException e) {
+			return null;
+		} catch (final IOException | UncheckedIOException e) {
+			throw new N5IOException("Failed to read block from " + path, e);
+		}
+	}
+
+	@Override
+	public void writeBlock(final DataBlock<T> block) {
+
+		final int[] relativePosition = getBlockPosition(block.getGridPosition());
+		if (relativePosition == null)
+			throw new N5IOException("Attempted to write block in the wrong shard.");
+
+		final ShardIndex index = getIndex();
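+		// blocks are appended to the end of the existing shard object; if the
+		// shard does not exist yet, writing starts after the index when the
+		// index is located at the start of the shard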
+		long startByte = 0;
+		try {
+			startByte = keyValueAccess.size(path);
+		} catch (N5Exception.N5NoSuchKeyException e) {
+			startByte = index.getLocation() == ShardingCodec.IndexLocation.START ? index.numBytes() : 0;
+		} catch (IOException e) {
+			throw new N5IOException(e);
+		}
+		final long size = Long.MAX_VALUE - startByte;
+
+		try (final LockedChannel lockedChannel = keyValueAccess.lockForWriting(path, startByte, size)) {
+			try ( final OutputStream channelOut = lockedChannel.newOutputStream()) {
+				try (final CountingOutputStream out = new CountingOutputStream(channelOut)) {
+					writeBlock(out, datasetAttributes, block);
+
+					/* Update and write the index to the shard*/
+					index.set(startByte, out.getNumBytes(), relativePosition);
+				}
+			}
+		} catch (final IOException | UncheckedIOException e) {
+			throw new N5IOException("Failed to write block to shard " + path, e);
+		}
+
+		try {
+			ShardIndex.write(index, keyValueAccess, path);
+		} catch (IOException e) {
+			throw new N5IOException("Failed to write index to shard " + path, e);
+		}
+	}
+
+	<T> void writeBlock(
+			final OutputStream out,
+			final DatasetAttributes datasetAttributes,
+			final DataBlock<T> dataBlock) throws IOException {
+
+		ShardingCodec shardingCodec = (ShardingCodec)datasetAttributes.getArrayCodec();
+		final Codec.BytesCodec[] codecs = shardingCodec.getCodecs();
+		final Codec.ArrayCodec arrayCodec = shardingCodec.getArrayCodec();
+		final Codec.DataBlockOutputStream dataBlockOutput = arrayCodec.encode(datasetAttributes, dataBlock, out);
+		final OutputStream stream = Codec.encode(dataBlockOutput, codecs);
+
+		dataBlock.writeData(dataBlockOutput.getDataOutput(stream));
+		stream.close();
+	}
+
+	public ShardIndex createIndex() {
+
+		// Empty index of the correct size
+		return ((ShardingCodec)getDatasetAttributes().getArrayCodec()).createIndex(getDatasetAttributes());
+	}
+
+	@Override
+	public ShardIndex getIndex() {
+
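+		// note: the index is re-read from backing storage on every call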
+		index = createIndex();
+		ShardIndex.read(keyValueAccess, path, index);
+
+		return index;
+	}
+
+	static class CountingOutputStream extends OutputStream {
+		private final OutputStream out;
+		private long numBytes;
+
+		public CountingOutputStream(OutputStream out) {
+			this.out = out;
+			this.numBytes = 0;
+		}
+
+		@Override
+		public void write(int b) throws IOException {
+			out.write(b);
+			numBytes++;
+		}
+
+		@Override
+		public void write(byte[] b) throws IOException {
+			out.write(b);
+			numBytes += b.length;
+		}
+
+		@Override
+		public void write(byte[] b, int off, int len) throws IOException {
+			out.write(b, off, len);
+			numBytes += len;
+		}
+
+		@Override
+		public void flush() throws IOException {
+			out.flush();
+		}
+
+		@Override
+		public void close() throws IOException {
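+			// intentionally does not close the wrapped stream; its lifecycle is managed by the caller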
+		}
+
+		public long getNumBytes() {
+			return numBytes;
+		}
+	}
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/util/FinalPosition.java b/src/main/java/org/janelia/saalfeldlab/n5/util/FinalPosition.java
new file mode 100644
index 00000000..1b7076d5
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/util/FinalPosition.java
@@ -0,0 +1,38 @@
+package org.janelia.saalfeldlab.n5.util;
+
+import java.util.Arrays;
+
+/**
+ * An immutable {@link Position}.
+ */
+public class FinalPosition implements Position {
+
+	public final long[] position;
+
+	public FinalPosition(long[] position) {
+		this.position = position;
+	}
+
+	public FinalPosition(Position p) {
+		this.position = p.get().clone();
+	}
+
+	@Override
+	public long[] get() {
+		return position;
+	}
+
+	@Override
+	public long get(int i) {
+		return position[i];
+	}
+
+	@Override
+	public String toString() {
+		return Position.toString(this);
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		return Position.equals(this, obj);
+	}
+
+	@Override
+	public int hashCode() {
+		// equals is overridden, so hashCode must be consistent with it
+		return Arrays.hashCode(position);
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/util/GridIterator.java b/src/main/java/org/janelia/saalfeldlab/n5/util/GridIterator.java
new file mode 100644
index 00000000..67ee3c42
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/util/GridIterator.java
@@ -0,0 +1,169 @@
+package org.janelia.saalfeldlab.n5.util;
+
+import java.util.Iterator;
+
+/**
+ * Essentially imglib2's IntervalIterator, but N5 does not depend on imglib2.
+ */
+public class GridIterator implements Iterator<long[]> {
+
+	final protected long[] dimensions;
+
+	final protected long[] steps;
+
+	final protected long[] position;
+
+	final protected long[] min;
+
+	final protected int lastIndex;
+
+	protected int index = -1;
+
+	public GridIterator(final long[] dimensions, final long[] min) {
+
+		final int n = dimensions.length;
+		this.dimensions = new long[n];
+		this.position = new long[n];
+		this.min = min;
+		steps = new long[n];
+
+		final int m = n - 1;
+		long k = steps[0] = 1;
+		for (int d = 0; d < m;) {
+			final long dimd = dimensions[d];
+			this.dimensions[d] = dimd;
+			k *= dimd;
+			steps[++d] = k;
+		}
+		final long dimm = dimensions[m];
+		this.dimensions[m] = dimm;
+		lastIndex = (int)(k * dimm - 1);
+	}
+
+	public GridIterator(final long[] dimensions) {
+
+		this(dimensions, new long[dimensions.length]);
+	}
+
+	public GridIterator(final int[] dimensions) {
+
+		this(int2long(dimensions));
+	}
+
+	public void fwd() {
+		++index;
+	}
+
+	public void reset() {
+		index = -1;
+	}
+
+	@Override
+	public boolean hasNext() {
+		return index < lastIndex;
+	}
+
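+	/**
+	 * Returns the next grid position. Note that the same array instance is
+	 * reused and mutated by subsequent calls; clone it if it must be retained.
+	 */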
+	@Override
+	public long[] next() {
+		fwd();
+		indexToPosition(index, dimensions, min, position);
+		return position;
+	}
+
+	public int[] nextAsInt() {
+		return long2int(next());
+	}
+
+	public int getIndex() {
+		return index;
+	}
+
+	final static public void indexToPosition(long index, final long[] dimensions, final long[] offset,
+			final long[] position) {
+		for (int dim = 0; dim < dimensions.length; dim++) {
+			position[dim] = (index % dimensions[dim]) + offset[dim];
+			index /= dimensions[dim];
+		}
+	}
+
+	final static public void indexToPosition(long index, final long[] dimensions, final long[] position) {
+		for (int dim = 0; dim < dimensions.length; dim++) {
+			position[dim] = index % dimensions[dim];
+			index /= dimensions[dim];
+		}
+	}
+
+	final static public void indexToPosition(long index, final int[] dimensions, final long[] offset,
+			final long[] position) {
+		for (int dim = 0; dim < dimensions.length; dim++) {
+			position[dim] = (index % dimensions[dim]) + offset[dim];
+			index /= dimensions[dim];
+		}
+	}
+
+	final static public void indexToPosition(long index, final int[] dimensions, final long[] position) {
+		for (int dim = 0; dim < dimensions.length; dim++) {
+			position[dim] = index % dimensions[dim];
+			index /= dimensions[dim];
+		}
+	}
+
+	final static public long positionToIndex(final long[] dimensions, final long[] position) {
+		long idx = position[0];
+		long cumulativeSize = dimensions[0];
+		for (int i = 1; i < position.length; i++) {
+			idx += position[i] * cumulativeSize;
+			cumulativeSize *= dimensions[i];
+		}
+		return idx;
+	}
+
+	final static public long positionToIndex(final long[] dimensions, final int[] position) {
+		long idx = position[0];
+		long cumulativeSize = dimensions[0];
+		for (int i = 1; i < position.length; i++) {
+			idx += position[i] * cumulativeSize;
+			cumulativeSize *= dimensions[i];
+		}
+		return idx;
+	}
+
+	final static public long positionToIndex(final int[] dimensions, final int[] position) {
+		long idx = position[0];
+		long cumulativeSize = dimensions[0];
+		for (int i = 1; i < position.length; i++) {
+			idx += position[i] * cumulativeSize;
+			cumulativeSize *= dimensions[i];
+		}
+		return idx;
+	}
+
+	final static public long positionToIndex(final int[] dimensions, final long[] position) {
+		long idx = position[0];
+		long cumulativeSize = dimensions[0];
+		for (int i = 1; i < position.length; i++) {
+			idx += position[i] * cumulativeSize;
+			cumulativeSize *= dimensions[i];
+		}
+		return idx;
+	}
+
+	final static public int[] long2int(final long[] a) {
+		final int[] i = new int[a.length];
+
+		for (int d = 0; d < a.length; ++d)
+			i[d] = (int) a[d];
+
+		return i;
+	}
+
+	final static public long[] int2long(final int[] i) {
+		final long[] l = new long[i.length];
+
+		for (int d = 0; d < l.length; ++d)
+			l[d] = i[d];
+
+		return l;
+	}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/util/Position.java b/src/main/java/org/janelia/saalfeldlab/n5/util/Position.java
new file mode 100644
index 00000000..5ddb8cf0
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/util/Position.java
@@ -0,0 +1,62 @@
+package org.janelia.saalfeldlab.n5.util;
+
+import java.util.Arrays;
+
+/**
+ * A wrapper around a primitive long array that is lexicographically {@link Comparable}
+ * and for which we can test equality.
+ */
+public interface Position extends Comparable<Position> {
+
+	public long[] get();
+
+	public long get(int i);
+
+	default int numDimensions() {
+		return get().length;
+	}
+
+	@Override
+	default int compareTo(Position other) {
+
+		// use Arrays.compare when we update to Java 9+
+		final int n = Math.min(numDimensions(), other.numDimensions());
+		for (int i = 0; i < n; i++) {
+			final int c = Long.compare(get(i), other.get(i));
+			if (c != 0)
+				return c;
+		}
+		return Integer.compare(numDimensions(), other.numDimensions());
+	}
+
+	public static boolean equals(final Position a, final Object b) {
+
+		if (a == null && b == null)
+			return true;
+
+		if (a == null || b == null)
+			return false;
+
+		if (!(b instanceof Position))
+			return false;
+
+		final Position other = (Position) b;
+		if (other.numDimensions() != a.numDimensions())
+			return false;
+
+		for (int i = 0; i < a.numDimensions(); i++)
+			if (other.get(i) != a.get(i))
+				return false;
+
+		return true;
+	}
+
+	public static String toString(Position p) {
+		return "Position: " + Arrays.toString(p.get());
+	}
+
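+	/**
+	 * Wraps the given array as a {@link Position} without copying it.
+	 */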
+	public static Position wrap(final long[] p) {
+		return new FinalPosition(p);
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/AbstractN5Test.java b/src/test/java/org/janelia/saalfeldlab/n5/AbstractN5Test.java
index 6e2e9e8f..9b1e0831 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/AbstractN5Test.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/AbstractN5Test.java
@@ -50,6 +50,9 @@
 
 import org.janelia.saalfeldlab.n5.N5Exception.N5ClassCastException;
 import org.janelia.saalfeldlab.n5.N5Reader.Version;
+import org.janelia.saalfeldlab.n5.codec.AsTypeCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.N5BlockCodec;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -89,7 +92,7 @@ public abstract class AbstractN5Test {
 
 	protected final HashSet<N5Writer> tempWriters = new HashSet<>();
 
-	protected final N5Writer createTempN5Writer() {
+	public final N5Writer createTempN5Writer() {
 
 		try {
 			return createTempN5Writer(tempN5Location());
@@ -117,6 +120,7 @@ protected final N5Writer createTempN5Writer(String location, GsonBuilder gson) {
 
 	@After
 	public void removeTempWriters() {
+
 		synchronized (tempWriters) {
 			for (final N5Writer writer : tempWriters) {
 				try {
@@ -208,11 +212,11 @@ public void testSetAttributeDoesntCreateGroup() {
 	}
 
 	@Test
-	public void testCreateDataset()  {
+	public void testCreateDataset() {
 
 		final DatasetAttributes info;
 		try (N5Writer writer = createTempN5Writer()) {
-			writer.createDataset(datasetName, dimensions, blockSize, DataType.UINT64, new RawCompression());
+			writer.createDataset(datasetName, dimensions, blockSize, DataType.UINT64);
 
 			assertTrue("Dataset does not exist", writer.exists(datasetName));
 
@@ -246,6 +250,33 @@ public void testWriteReadByteBlock() {
 		}
 	}
 
+	@Test
+	public void testWriteReadByteBlockMultipleCodecs() {
+
+		/* TODO: this test "passes" in the sense that we get the correct output, but it
+		 * may not be the behavior we actually want */
+		try (final N5Writer n5 = createTempN5Writer()) {
+			final Codec[] codecs = {
+					new N5BlockCodec(),
+					new AsTypeCodec(DataType.INT32, DataType.INT8),
+					new AsTypeCodec(DataType.INT64, DataType.INT32),
+			};
+			final long[] longBlock1 = new long[]{1,2,3,4,5,6,7,8};
+			final long[] dimensions1 = new long[]{2,2,2};
+			final int[] blockSize1 = new int[]{2,2,2};
+			n5.createDataset(datasetName, dimensions1, blockSize1, DataType.INT8, codecs);
+			final DatasetAttributes attributes = n5.getDatasetAttributes(datasetName);
+			final LongArrayDataBlock dataBlock = new LongArrayDataBlock(blockSize1, new long[]{0, 0, 0}, longBlock1);
+			n5.writeBlock(datasetName, attributes, dataBlock);
+
+			final DatasetAttributes fakeAttributes = new DatasetAttributes(dimensions1, blockSize1, DataType.INT64, codecs);
+			final DataBlock<?> loadedDataBlock = n5.readBlock(datasetName, fakeAttributes, 0, 0, 0);
+			assertArrayEquals(longBlock1, (long[])loadedDataBlock.getData());
+			assertTrue(n5.remove(datasetName));
+
+		}
+	}
+
 	@Test
 	public void testWriteReadStringBlock() {
 
@@ -305,7 +336,7 @@ public void testWriteReadIntBlock() {
 					DataType.INT32}) {
 
 				try (final N5Writer n5 = createTempN5Writer()) {
-					n5.createDataset(datasetName, dimensions, blockSize, dataType, compression);
+					n5.createDataset(datasetName, dimensions, blockSize, dataType, (Codec)compression);
 					final DatasetAttributes attributes = n5.getDatasetAttributes(datasetName);
 					final IntArrayDataBlock dataBlock = new IntArrayDataBlock(blockSize, new long[]{0, 0, 0}, intBlock);
 					n5.writeBlock(datasetName, attributes, dataBlock);
@@ -465,7 +496,7 @@ public void testOverwriteBlock() {
 	}
 
 	@Test
-	public void testAttributeParsingPrimitive()  {
+	public void testAttributeParsingPrimitive() {
 
 		try (final N5Writer n5 = createTempN5Writer()) {
 
@@ -541,7 +572,7 @@ public void testAttributeParsingPrimitive()  {
 	}
 
 	@Test
-	public void testAttributes()  {
+	public void testAttributes() {
 
 		try (final N5Writer n5 = createTempN5Writer()) {
 			assertNull(n5.getAttribute(groupName, "test", String.class));
@@ -607,7 +638,6 @@ public void testAttributes()  {
 		}
 	}
 
-
 	@Test
 	public void testNullAttributes() throws URISyntaxException, IOException {
 
@@ -831,7 +861,7 @@ public void testUri() throws IOException, URISyntaxException {
 	}
 
 	@Test
-	public void testRemoveGroup()  {
+	public void testRemoveGroup() {
 
 		try (final N5Writer n5 = createTempN5Writer()) {
 			n5.createDataset(datasetName, dimensions, blockSize, DataType.UINT64, new RawCompression());
@@ -880,7 +910,7 @@ public void testDeepList() throws ExecutionException, InterruptedException {
 			for (final String subGroup : subGroupNames)
 				assertTrue("deepList contents", Arrays.asList(n5.deepList("")).contains(groupName.replaceFirst("/", "") + "/" + subGroup));
 
-			final DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions, blockSize, DataType.UINT64, new RawCompression());
+			final DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions, blockSize, DataType.UINT64);
 			final LongArrayDataBlock dataBlock = new LongArrayDataBlock(blockSize, new long[]{0, 0, 0}, new long[blockNumElements]);
 			n5.createDataset(datasetName, datasetAttributes);
 			n5.writeBlock(datasetName, datasetAttributes, dataBlock);
@@ -982,7 +1012,7 @@ public void testDeepList() throws ExecutionException, InterruptedException {
 	}
 
 	@Test
-	public void testExists()  {
+	public void testExists() {
 
 		final String groupName2 = groupName + "-2";
 		final String datasetName2 = datasetName + "-2";
@@ -1003,7 +1033,7 @@ public void testExists()  {
 	}
 
 	@Test
-	public void testListAttributes()  {
+	public void testListAttributes() {
 
 		try (N5Writer n5 = createTempN5Writer()) {
 			final String groupName2 = groupName + "-2";
@@ -1106,7 +1136,7 @@ public void testReaderCreation() throws IOException, URISyntaxException {
 			writer.setAttribute("/", N5Reader.VERSION_KEY, invalidVersion);
 			assertThrows("Incompatible version throws error", N5Exception.class, () -> {
 				try (final N5Reader ignored = createN5Reader(location)) {
-					 /*Only try with resource to ensure `close()` is called.*/
+					/*Only try with resource to ensure `close()` is called.*/
 				}
 			});
 		} finally {
@@ -1123,7 +1153,7 @@ public void testReaderCreation() throws IOException, URISyntaxException {
 	}
 
 	@Test
-	public void testDelete()  {
+	public void testDelete() {
 
 		try (N5Writer n5 = createTempN5Writer()) {
 			final String datasetName = AbstractN5Test.datasetName + "-test-delete";
@@ -1209,7 +1239,7 @@ protected static void runTests(final N5Writer writer, final ArrayList<TestData<?
 	}
 
 	@Test
-	public void testAttributePaths()  {
+	public void testAttributePaths() {
 
 		try (final N5Writer writer = createTempN5Writer()) {
 
@@ -1314,7 +1344,7 @@ public void testAttributePaths()  {
 	}
 
 	@Test
-	public void testAttributePathEscaping()  {
+	public void testAttributePathEscaping() {
 
 		final JsonObject emptyObj = new JsonObject();
 
@@ -1401,7 +1431,7 @@ private String jsonKeyVal(final String key, final String val) {
 
 	@Test
 	public void
-	testRootLeaves()  {
+	testRootLeaves() {
 
 		/* Test retrieving non-JsonObject root leaves */
 		try (final N5Writer n5 = createTempN5Writer()) {
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccessTest.java b/src/test/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccessTest.java
index e902eb88..a42f8e07 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccessTest.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccessTest.java
@@ -7,9 +7,6 @@
 
 import java.nio.file.FileSystems;
 import java.nio.file.Paths;
-import java.util.Arrays;
-
-import org.junit.BeforeClass;
 import org.junit.Test;
 
 
@@ -47,12 +44,6 @@ public class FileSystemKeyValueAccessTest {
 			{""}
 	};
 
-	/**
-	 * @throws java.lang.Exception
-	 */
-	@BeforeClass
-	public static void setUpBeforeClass() throws Exception {}
-
 	@Test
 	public void testComponents() {
 
@@ -61,7 +52,6 @@ public void testComponents() {
 		for (int i = 0; i < testPaths.length; ++i) {
 
 			final String[] components = access.components(testPaths[i]);
-			System.out.println(String.format("%d: %s -> %s", i, testPaths[i], Arrays.toString(components)));
 
 			assertArrayEquals(testPathComponents[i], components);
 		}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/N5FSTest.java b/src/test/java/org/janelia/saalfeldlab/n5/N5FSTest.java
index bd1e43aa..da0a38ec 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/N5FSTest.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/N5FSTest.java
@@ -199,7 +199,6 @@ public void testWriteLock() throws IOException {
 	@Test
 	public void testLockReleaseByReader() throws IOException, ExecutionException, InterruptedException, TimeoutException {
 
-		System.out.println("Testing lock release by Reader.");
 
 		final Path path = Paths.get(tempN5PathName(), "lock");
 		final LockedChannel lock = access.lockForWriting(path);
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/codec/AsTypeTests.java b/src/test/java/org/janelia/saalfeldlab/n5/codec/AsTypeTests.java
new file mode 100644
index 00000000..59aa3298
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/codec/AsTypeTests.java
@@ -0,0 +1,73 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.junit.Test;
+
+public class AsTypeTests {
+
+	@Test
+	public void testInt2Byte() throws IOException {
+
+		final int N = 16;
+		final ByteBuffer intsAsBuffer = ByteBuffer.allocate(Integer.BYTES * N);
+		final byte[] encodedBytes = new byte[N];
+		for (int i = 0; i < N; i++) {
+			intsAsBuffer.putInt(i);
+			encodedBytes[i] = (byte)i;
+		}
+
+		final byte[] decodedInts = intsAsBuffer.array();
+		testEncodingAndDecoding(new AsTypeCodec(DataType.INT32, DataType.INT8), encodedBytes, decodedInts);
+		testEncodingAndDecoding(new AsTypeCodec(DataType.INT8, DataType.INT32), decodedInts, encodedBytes);
+	}
+
+	@Test
+	public void testDouble2Byte() throws IOException {
+
+		final int N = 16;
+		final ByteBuffer doublesAsBuffer = ByteBuffer.allocate(Double.BYTES * N);
+		final byte[] encodedBytes = new byte[N];
+		for (int i = 0; i < N; i++) {
+			doublesAsBuffer.putDouble(i);
+			encodedBytes[i] = (byte)i;
+		}
+		final byte[] decodedDoubles = doublesAsBuffer.array();
+
+		testEncodingAndDecoding(new AsTypeCodec(DataType.FLOAT64, DataType.INT8), encodedBytes, decodedDoubles);
+		testEncodingAndDecoding(new AsTypeCodec(DataType.INT8, DataType.FLOAT64), decodedDoubles, encodedBytes);
+	}
+
+	public static void testEncodingAndDecoding(Codec.BytesCodec codec, byte[] encodedBytes, byte[] decodedBytes) throws IOException {
+
+		testEncoding(codec, encodedBytes, decodedBytes);
+		testDecoding(codec, decodedBytes, encodedBytes);
+	}
+
+	public static void testDecoding(final Codec.BytesCodec codec, final byte[] expected, final byte[] input) throws IOException {
+
+		final InputStream result = codec.decode(new ByteArrayInputStream(input));
+		for (int i = 0; i < expected.length; i++)
+			assertEquals(expected[i], (byte)result.read());
+	}
+
+	public static void testEncoding(final Codec.BytesCodec codec, final byte[] expected, final byte[] data) throws IOException {
+
+		final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length);
+		final OutputStream encodedStream = codec.encode(outputStream);
+		encodedStream.write(data);
+		encodedStream.flush();
+		assertArrayEquals(expected, outputStream.toByteArray());
+		encodedStream.close();
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/codec/BytesTests.java b/src/test/java/org/janelia/saalfeldlab/n5/codec/BytesTests.java
new file mode 100644
index 00000000..ea24a7d9
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/codec/BytesTests.java
@@ -0,0 +1,53 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteOrder;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.NameConfigAdapter;
+import org.janelia.saalfeldlab.n5.RawCompression;
+import org.janelia.saalfeldlab.n5.universe.N5Factory;
+import org.junit.Test;
+
+import com.google.gson.GsonBuilder;
+
+public class BytesTests {
+
+	@Test
+	public void testSerialization() {
+
+		final N5Factory factory = new N5Factory();
+		factory.cacheAttributes(false);
+		final GsonBuilder gsonBuilder = new GsonBuilder();
+		gsonBuilder.registerTypeHierarchyAdapter(Codec.class, NameConfigAdapter.getJsonAdapter(Codec.class));
+		gsonBuilder.registerTypeAdapter(ByteOrder.class, RawBytes.byteOrderAdapter);
+		factory.gsonBuilder(gsonBuilder);
+
+		final N5Writer reader = factory.openWriter("n5:src/test/resources/shardExamples/test.n5");
+		final Codec bytes = reader.getAttribute("mid_sharded", "codecs[0]/configuration/codecs[0]", Codec.class);
+		assertTrue("as RawBytes", bytes instanceof RawBytes);
+
+		final N5Writer writer = factory.openWriter("n5:src/test/resources/shardExamples/test.n5");
+
+		final DatasetAttributes datasetAttributes = new DatasetAttributes(
+				new long[]{8, 8},
+				new int[]{4, 4},
+				DataType.UINT8,
+						new N5BlockCodec(ByteOrder.LITTLE_ENDIAN),
+						new IdentityCodec()
+		);
+		writer.createGroup("shard"); //Should already exist, but this will ensure.
+		writer.setAttribute("shard", "/", datasetAttributes);
+		final DatasetAttributes deserialized = writer.getAttribute("shard", "/", DatasetAttributes.class);
+
+		assertEquals("1 codecs", 1, deserialized.getCodecs().length);
+		assertTrue("Identity", deserialized.getCodecs()[0] instanceof IdentityCodec);
+		assertTrue("Bytes", deserialized.getArrayCodec() instanceof N5BlockCodec);
+		assertEquals("LittleEndian", ByteOrder.LITTLE_ENDIAN,
+				((N5BlockCodec)deserialized.getArrayCodec()).byteOrder);
+	}
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedInputStreamTest.java b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedInputStreamTest.java
new file mode 100644
index 00000000..27c744fa
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedInputStreamTest.java
@@ -0,0 +1,86 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.stream.IntStream;
+
+import org.junit.Test;
+
+public class FixedConvertedInputStreamTest {
+
+	@Test
+	public void testLengthOne() throws IOException
+	{
+
+		final byte expected = 5;
+		final byte[] data = new byte[32];
+		Arrays.fill(data, expected);
+
+		final FixedLengthConvertedInputStream convertedId = new FixedLengthConvertedInputStream(1, 1,
+				AsTypeCodec::IDENTITY_ONE,
+				new ByteArrayInputStream(data));
+
+		final FixedLengthConvertedInputStream convertedPlusOne = new FixedLengthConvertedInputStream(1, 1,
+				(x, y) -> {
+					y.put((byte)(x.get() + 1));
+				},
+				new ByteArrayInputStream(data));
+
+		for (int i = 0; i < 32; i++) {
+			assertEquals(expected, convertedId.read());
+			assertEquals(expected + 1, convertedPlusOne.read());
+		}
+
+		convertedId.close();
+		convertedPlusOne.close();
+	}
+
+	@Test
+	public void testIntToByte() throws IOException
+	{
+
+		final int N = 16;
+		final ByteBuffer buf = ByteBuffer.allocate(Integer.BYTES * N);
+		IntStream.range(0, N).forEach( x -> {
+			buf.putInt(x);
+		});
+
+		final byte[] data = buf.array();
+		final FixedLengthConvertedInputStream intToByte = new FixedLengthConvertedInputStream(
+				4, 1,
+				AsTypeCodec::INT_TO_BYTE,
+				new ByteArrayInputStream(data));
+
+		for( int i = 0; i < N; i++ )
+			assertEquals((byte)i, intToByte.read());
+
+		intToByte.close();
+	}
+
+	@Test
+	public void testByteToInt() throws IOException
+	{
+
+		final int N = 16;
+		final byte[] data = new byte[16];
+		for( int i = 0; i < N; i++ )
+			data[i] = (byte)i;
+
+		final FixedLengthConvertedInputStream byteToInt = new FixedLengthConvertedInputStream(
+				1, 4, AsTypeCodec::BYTE_TO_INT,
+				new ByteArrayInputStream(data));
+
+		final DataInputStream dataStream = new DataInputStream(byteToInt);
+		for( int i = 0; i < N; i++ )
+			assertEquals(i, dataStream.readInt());
+
+		dataStream.close();
+		byteToInt.close();
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedOutputStreamTest.java b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedOutputStreamTest.java
new file mode 100644
index 00000000..f8cf5215
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedConvertedOutputStreamTest.java
@@ -0,0 +1,96 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.stream.IntStream;
+
+import org.junit.Test;
+
+public class FixedConvertedOutputStreamTest {
+
+	@Test
+	public void testLengthOne() throws IOException
+	{
+		final int N = 2;
+		final byte expected = 5;
+		final byte expectedPlusOne = 6;
+		final byte[] expectedData = new byte[N];
+		Arrays.fill(expectedData, expected);
+
+		final byte[] expectedPlusOneData = new byte[N];
+		Arrays.fill(expectedPlusOneData, expectedPlusOne);
+
+		final ByteArrayOutputStream outId = new ByteArrayOutputStream(N);
+		final FixedLengthConvertedOutputStream convertedId = new FixedLengthConvertedOutputStream(1, 1,
+				AsTypeCodec::IDENTITY_ONE,
+				outId);
+
+		convertedId.write(expectedData);
+		convertedId.flush();
+		convertedId.close();
+
+		assertArrayEquals(expectedData, outId.toByteArray());
+
+
+		final ByteArrayOutputStream outPlusOne = new ByteArrayOutputStream(N);
+		final FixedLengthConvertedOutputStream convertedPlusOne = new FixedLengthConvertedOutputStream(1, 1,
+				(x, y) -> y.put((byte)(x.get() + 1)),
+				outPlusOne);
+
+		convertedPlusOne.write(expectedData);
+		convertedPlusOne.close();
+		assertArrayEquals(expectedPlusOneData, outPlusOne.toByteArray());
+	}
+
+	@Test
+	public void testIntToByte() throws IOException
+	{
+
+		final int N = 16;
+		final ByteBuffer buf = ByteBuffer.allocate(Integer.BYTES * N);
+		IntStream.range(0, N).forEach(buf::putInt);
+
+		final ByteBuffer expected = ByteBuffer.allocate(N);
+		IntStream.range(0, N).forEach( x -> expected.put((byte)x));
+
+		final ByteArrayOutputStream outStream = new ByteArrayOutputStream(N);
+		final FixedLengthConvertedOutputStream intToByte = new FixedLengthConvertedOutputStream(
+				4, 1,
+				AsTypeCodec::INT_TO_BYTE,
+				outStream);
+
+		intToByte.write(buf.array());
+		intToByte.close();
+
+		assertArrayEquals(expected.array(), outStream.toByteArray());
+	}
+
+	@Test
+	public void testByteToInt() throws IOException
+	{
+
+		final int N = 16;
+		final byte[] data = new byte[16];
+		for( int i = 0; i < N; i++ )
+			data[i] = (byte)i;
+
+		FixedLengthConvertedInputStream byteToInt = new FixedLengthConvertedInputStream(
+				1, 4,
+				(input, output) -> output.putInt(input.get()),
+				new ByteArrayInputStream(data));
+
+		final DataInputStream dataStream = new DataInputStream(byteToInt);
+		for( int i = 0; i < N; i++ )
+			assertEquals(i, dataStream.readInt());
+
+		dataStream.close();
+		byteToInt.close();
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetTests.java b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetTests.java
new file mode 100644
index 00000000..135a7f2b
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/codec/FixedScaleOffsetTests.java
@@ -0,0 +1,60 @@
+package org.janelia.saalfeldlab.n5.codec;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.stream.DoubleStream;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.junit.Test;
+
+public class FixedScaleOffsetTests {
+
+	@Test
+	public void testDouble2Byte() throws IOException {
+
+		final int N = 16;
+		final double[] doubles = DoubleStream.iterate(0.0, x -> x + 1).limit(N).toArray();
+		final ByteBuffer encodedDoubles = ByteBuffer.allocate(Double.BYTES * N);
+		final byte[] bytes = new byte[N];
+
+		final double scale = 2;
+		final double offset = 1;
+
+		for (int i = 0; i < N; i++) {
+			final double val = (scale * doubles[i] + offset);
+			bytes[i] = (byte)val;
+			encodedDoubles.putDouble(i);
+		}
+
+		final FixedScaleOffsetCodec double2Byte = new FixedScaleOffsetCodec(scale, offset, DataType.FLOAT64, DataType.INT8);
+		AsTypeTests.testEncoding(double2Byte, bytes, encodedDoubles.array());
+		AsTypeTests.testDecoding(double2Byte, encodedDoubles.array(), bytes);
+	}
+
+	@Test
+	public void testLong2Short() throws IOException {
+
+		final int N = 16;
+		final ByteBuffer encodedLongs = ByteBuffer.allocate(Long.BYTES * N);
+		final ByteBuffer encodedShorts = ByteBuffer.allocate(Short.BYTES * N);
+
+		final long scale = 2;
+		final long offset = 1;
+
+		for (int i = 0; i < N; i++) {
+			final long val = (scale * i + offset);
+			encodedShorts.putShort((short)val);
+			encodedLongs.putLong(i);
+		}
+
+		final byte[] shortBytes = encodedShorts.array();
+		final byte[] longBytes = encodedLongs.array();
+
+		final FixedScaleOffsetCodec long2short = new FixedScaleOffsetCodec(scale, offset, DataType.INT64, DataType.INT16);
+		AsTypeTests.testEncoding(long2short, shortBytes, longBytes);
+		AsTypeTests.testDecoding(long2short, longBytes, shortBytes);
+	}
+
+
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/demo/BlockIterators.java b/src/test/java/org/janelia/saalfeldlab/n5/demo/BlockIterators.java
new file mode 100644
index 00000000..c96edc07
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/demo/BlockIterators.java
@@ -0,0 +1,95 @@
+package org.janelia.saalfeldlab.n5.demo;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Spliterator;
+import java.util.Spliterators;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.RawCompression;
+import org.janelia.saalfeldlab.n5.ShardedDatasetAttributes;
+import org.janelia.saalfeldlab.n5.codec.RawBytes;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+
+public class BlockIterators {
+
+	public static void main(String[] args) {
+
+//		blockIterator();
+		shardBlockIterator();
+	}
+
+	public static void shardBlockIterator() {
+
+		final ShardedDatasetAttributes attrs = new ShardedDatasetAttributes(
+				new long[] {12, 8},	// image size
+				new int[] {6, 4},		// shard size
+				new int[] {2, 2},		// block size
+				DataType.UINT8,
+				new Codec[] { new RawBytes() },
+				new DeterministicSizeCodec[] { new RawBytes() },
+				IndexLocation.END);
+
+		shardPositions(attrs)
+			.forEach(x -> System.out.println(Arrays.toString(x)));
+	}
+
+	public static void blockIterator() {
+
+		final DatasetAttributes attrs = new DatasetAttributes(
+				new long[] {12, 8},
+				new int[] {2, 2},
+				DataType.UINT8,
+				new RawCompression());
+
+		blockPositions(attrs).forEach(x -> System.out.println(Arrays.toString(x)));
+	}
+	
+	public static long[] blockGridSize(final DatasetAttributes attrs) {
+		// this could be a nice method for DatasetAttributes
+
+		return IntStream.range(0, attrs.getNumDimensions())
+				.mapToLong(i -> (long)Math.ceil((double)attrs.getDimensions()[i] / attrs.getBlockSize()[i]))
+				.toArray();
+	}
+	
+	public static long[] shardGridSize(final ShardedDatasetAttributes attrs) {
+		// this could be a nice method for DatasetAttributes
+
+		return IntStream.range(0, attrs.getNumDimensions())
+				.mapToLong(i -> (long)Math.ceil((double)attrs.getDimensions()[i] / attrs.getShardSize()[i]))
+				.toArray();
+	}
+
+	public static Stream<long[]> blockPositions( DatasetAttributes attrs ) {
+		return toStream(new GridIterator(blockGridSize(attrs)));
+	}
+
+	public static Stream<long[]> shardPositions( ShardedDatasetAttributes attrs ) {
+
+		final int[] blocksPerShard = attrs.getBlocksPerShard();
+		return toStream( new GridIterator(shardGridSize(attrs)))
+				.flatMap( shardPosition -> {
+
+					final int nd = attrs.getNumDimensions();
+					final long[] min = attrs.getBlockPositionFromShardPosition(shardPosition, new long[nd]);
+					return toStream(new GridIterator(GridIterator.int2long(blocksPerShard), min));
+				});
+	}
+
+	public static <T> Stream<T> toStream(final Iterator<T> it) {
+		return StreamSupport.stream(Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/serialization/CodecSerialization.java b/src/test/java/org/janelia/saalfeldlab/n5/serialization/CodecSerialization.java
new file mode 100644
index 00000000..0610c7c5
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/serialization/CodecSerialization.java
@@ -0,0 +1,91 @@
+package org.janelia.saalfeldlab.n5.serialization;
+
+import static org.janelia.saalfeldlab.n5.NameConfigAdapter.getJsonAdapter;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.GsonUtils;
+import org.janelia.saalfeldlab.n5.GzipCompression;
+import org.janelia.saalfeldlab.n5.NameConfigAdapter;
+import org.janelia.saalfeldlab.n5.codec.AsTypeCodec;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.FixedScaleOffsetCodec;
+import org.janelia.saalfeldlab.n5.codec.IdentityCodec;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+public class CodecSerialization {
+
+	private Gson gson;
+
+	@Before
+	public void before() {
+
+		final GsonBuilder gsonBuilder = new GsonBuilder();
+		GsonUtils.registerGson(gsonBuilder);
+		gson = gsonBuilder.create();
+	}
+
+	@Test
+	public void testSerializeIdentity() {
+
+		final IdentityCodec id = new IdentityCodec();
+		final JsonObject jsonId = gson.toJsonTree(id).getAsJsonObject();
+		final JsonElement expected = gson.fromJson("{\"name\":\"id\"}", JsonElement.class);
+		assertEquals("identity", expected, jsonId.getAsJsonObject());
+	}
+
+	@Test
+	public void testSerializeAsType() {
+
+		final AsTypeCodec asTypeCodec = new AsTypeCodec(DataType.FLOAT64, DataType.INT16);
+		final JsonObject jsonAsType = gson.toJsonTree(asTypeCodec).getAsJsonObject();
+		final JsonElement expected = gson.fromJson(
+				"{\"name\":\"astype\",\"configuration\":{\"dataType\":\"float64\",\"encodedType\":\"int16\"}}",
+				JsonElement.class);
+		assertEquals("asType", expected, jsonAsType.getAsJsonObject());
+	}
+
+	@Test
+	public void testSerializeCodecArray() {
+
+		Codec[] codecs = new Codec[]{
+				new IdentityCodec(),
+				new AsTypeCodec(DataType.FLOAT64, DataType.INT16)
+		};
+		JsonArray jsonCodecArray = gson.toJsonTree(codecs).getAsJsonArray();
+		JsonElement expected = gson.fromJson(
+				"[{\"name\":\"id\"},{\"name\":\"astype\",\"configuration\":{\"dataType\":\"float64\",\"encodedType\":\"int16\"}}]",
+				JsonElement.class);
+		assertEquals("codec array", expected, jsonCodecArray.getAsJsonArray());
+
+		Codec[] codecsDeserialized = gson.fromJson(expected, Codec[].class);
+		assertEquals("codecs length not 2", 2, codecsDeserialized.length);
+		assertTrue("first codec not identity", codecsDeserialized[0] instanceof IdentityCodec);
+		assertTrue("second codec not asType", codecsDeserialized[1] instanceof AsTypeCodec);
+
+		codecs = new Codec[]{
+				new AsTypeCodec(DataType.FLOAT64, DataType.INT16),
+				new GzipCompression()
+		};
+		jsonCodecArray = gson.toJsonTree(codecs).getAsJsonArray();
+		expected = gson.fromJson(
+				"[{\"name\":\"astype\",\"configuration\":{\"dataType\":\"float64\",\"encodedType\":\"int16\"}},{\"name\":\"gzip\",\"configuration\":{\"level\":-1,\"useZlib\":false}}]",
+				JsonElement.class);
+		assertEquals("codec array", expected, jsonCodecArray.getAsJsonArray());
+
+		codecsDeserialized = gson.fromJson(expected, Codec[].class);
+		assertEquals("codecs length not 2", 2, codecsDeserialized.length);
+		assertTrue("first codec not asType", codecsDeserialized[0] instanceof AsTypeCodec);
+		assertTrue("second codec not gzip", codecsDeserialized[1] instanceof GzipCompression);
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardIndexTest.java b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardIndexTest.java
new file mode 100644
index 00000000..0c8ee24a
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardIndexTest.java
@@ -0,0 +1,124 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Paths;
+
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.LockedChannel;
+import org.janelia.saalfeldlab.n5.N5FSTest;
+import org.janelia.saalfeldlab.n5.N5KeyValueWriter;
+import org.janelia.saalfeldlab.n5.codec.RawBytes;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.codec.checksum.Crc32cChecksumCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+import org.junit.After;
+import org.junit.Test;
+
+public class ShardIndexTest {
+
+	private static final N5FSTest tempN5Factory = new N5FSTest();
+
+	@After
+	public void removeTempWriters() {
+		tempN5Factory.removeTempWriters();
+	}
+
+	@Test
+	public void testOffsetIndex() throws IOException {
+
+		int[] shardBlockGridSize = new int[]{5,4,3};
+		ShardIndex index = new ShardIndex(
+				shardBlockGridSize,
+				IndexLocation.END, new RawBytes());
+
+		GridIterator it = new GridIterator(shardBlockGridSize);
+		int i = 0;
+		while( it.hasNext()) {
+			int j = index.getOffsetIndex(GridIterator.long2int(it.next()));
+			assertEquals(i, j);
+			i+=2;
+		}
+
+		shardBlockGridSize = new int[]{5,4,3,13};
+		index = new ShardIndex(
+				shardBlockGridSize,
+				IndexLocation.END, new RawBytes());
+
+		it = new GridIterator(shardBlockGridSize);
+		i = 0;
+		while( it.hasNext()) {
+			int j = index.getOffsetIndex(GridIterator.long2int(it.next()));
+			assertEquals(i, j);
+			i+=2;
+		}
+
+	}
+
+	@Test
+	public void testReadVirtual() throws IOException {
+
+		final N5KeyValueWriter writer = (N5KeyValueWriter) tempN5Factory.createTempN5Writer();
+		final KeyValueAccess kva = writer.getKeyValueAccess();
+
+		final int[] shardBlockGridSize = new int[] { 6, 5 };
+		final IndexLocation indexLocation = IndexLocation.END;
+		final DeterministicSizeCodec[] indexCodecs = new DeterministicSizeCodec[] { new RawBytes(),
+				new Crc32cChecksumCodec() };
+
+		final String path = Paths.get(Paths.get(writer.getURI()).toAbsolutePath().toString(), "0").toString();
+
+		final ShardIndex index = new ShardIndex(shardBlockGridSize, indexLocation, indexCodecs);
+		index.set(0, 6, new int[] { 0, 0 });
+		index.set(19, 32, new int[] { 1, 0 });
+		index.set(93, 111, new int[] { 3, 0 });
+		index.set(143, 1, new int[] { 1, 2 });
+		ShardIndex.write(index, kva, path);
+
+		final ShardIndex other = new ShardIndex(shardBlockGridSize, indexLocation, indexCodecs);
+		ShardIndex.read(kva, path, other);
+
+		assertEquals(index, other);
+	}
+
+	@Test
+	public void testReadInMemory() throws IOException {
+
+		final N5KeyValueWriter writer = (N5KeyValueWriter) tempN5Factory.createTempN5Writer();
+		final KeyValueAccess kva = writer.getKeyValueAccess();
+
+		final int[] shardBlockGridSize = new int[] { 6, 5 };
+		final IndexLocation indexLocation = IndexLocation.END;
+		final DeterministicSizeCodec[] indexCodecs = new DeterministicSizeCodec[] { 
+				new RawBytes(),
+				new Crc32cChecksumCodec() };
+		final String path = Paths.get(Paths.get(writer.getURI()).toAbsolutePath().toString(), "indexTest").toString();
+
+		final ShardIndex index = new ShardIndex(shardBlockGridSize, indexLocation, indexCodecs);
+		index.set(0, 6, new int[] { 0, 0 });
+		index.set(19, 32, new int[] { 1, 0 });
+		index.set(93, 111, new int[] { 3, 0 });
+		index.set(143, 1, new int[] { 1, 2 });
+		ShardIndex.write(index, kva, path);
+
+		final ShardIndex indexRead = new ShardIndex(shardBlockGridSize, indexLocation, indexCodecs);
+		ShardIndex.read(rawBytes(kva, path), indexRead);
+
+		assertEquals(index, indexRead);
+	}
+
+	private static byte[] rawBytes(KeyValueAccess kva, String path) throws IOException {
+
+		final byte[] rawBytes = new byte[(int) kva.size(path)];
+		try (final LockedChannel lockedChannel = kva.lockForReading(path)) {
+			try (final InputStream is = lockedChannel.newInputStream()) {
+				// read() may return fewer bytes than requested, so loop until the buffer is full
+				int off = 0;
+				while (off < rawBytes.length) {
+					final int n = is.read(rawBytes, off, rawBytes.length - off);
+					if (n < 0)
+						throw new IOException("unexpected end of stream: " + path);
+					off += n;
+				}
+			}
+		}
+		return rawBytes;
+	}
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardPropertiesTests.java b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardPropertiesTests.java
new file mode 100644
index 00000000..eb0d6de4
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardPropertiesTests.java
@@ -0,0 +1,128 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+import org.janelia.saalfeldlab.n5.util.Position;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+public class ShardPropertiesTests {
+
+	@Test
+	public void testShardProperties() throws Exception {
+
+		final long[] arraySize = new long[]{16, 16};
+		final int[] shardSize = new int[]{16, 16};
+		final long[] shardPosition = new long[]{1, 1};
+		final int[] blkSize = new int[]{4, 4};
+
+		final DatasetAttributes dsetAttrs = new DatasetAttributes(
+				arraySize,
+				shardSize,
+				blkSize,
+				DataType.UINT8,
+				new ShardingCodec(
+						blkSize,
+						new Codec[]{},
+						new DeterministicSizeCodec[]{},
+						IndexLocation.END
+				)
+		);
+
+		@SuppressWarnings({"rawtypes", "unchecked"})
+		final InMemoryShard shard = new InMemoryShard(dsetAttrs, shardPosition, null);
+
+		assertArrayEquals(new int[]{4, 4}, shard.getBlockGridSize());
+
+		assertArrayEquals(new long[]{0, 0}, shard.getShardPosition(0, 0));
+		assertArrayEquals(new long[]{1, 1}, shard.getShardPosition(5, 5));
+		assertArrayEquals(new long[]{1, 0}, shard.getShardPosition(5, 0));
+		assertArrayEquals(new long[]{0, 1}, shard.getShardPosition(0, 5));
+
+		//		assertNull(shard.getBlockPosition(0, 0));
+		//		assertNull(shard.getBlockPosition(3, 3));
+
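+		// getBlockPosition is relative to the enclosing shard: absolute block
+		// (5, 5) sits in shard (1, 1) as its relative block (1, 1)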
+		assertArrayEquals(new int[]{0, 0}, shard.getBlockPosition(4, 4));
+		assertArrayEquals(new int[]{1, 1}, shard.getBlockPosition(5, 5));
+		assertArrayEquals(new int[]{2, 2}, shard.getBlockPosition(6, 6));
+		assertArrayEquals(new int[]{3, 3}, shard.getBlockPosition(7, 7));
+	}
+
+	@Test
+	public void testShardBlockPositionIterator() throws Exception {
+
+		final long[] arraySize = new long[]{16, 16};
+		final int[] shardSize = new int[]{16, 16};
+		final long[] shardPosition = new long[]{1, 1};
+		final int[] blkSize = new int[]{4, 4};
+
+		final DatasetAttributes dsetAttrs = new DatasetAttributes(
+				arraySize,
+				shardSize,
+				blkSize,
+				DataType.UINT8,
+				new ShardingCodec(
+						blkSize,
+						new Codec[]{},
+						new DeterministicSizeCodec[]{},
+						IndexLocation.END
+				)
+		);
+
+		@SuppressWarnings({"rawtypes", "unchecked"})
+		final InMemoryShard shard = new InMemoryShard(dsetAttrs, shardPosition, null);
+
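+		// shard (1, 1) of the 4x4 per-shard block grid covers absolute block
+		// positions (4, 4) through (7, 7), 16 blocks in total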
+		int i = 0;
+		Iterator<long[]> it = shard.blockPositionIterator();
+		long[] p = null;
+		while (it.hasNext()) {
+
+			p = it.next();
+			if (i == 0)
+				assertArrayEquals(new long[]{4, 4}, p);
+
+			i++;
+		}
+		assertEquals(16, i);
+		assertArrayEquals(new long[]{7, 7}, p);
+	}
+
+	@Test
+	public void testShardGrouping() {
+
+		final long[] arraySize = new long[]{8, 12};
+		final int[] shardSize = new int[]{4, 6};
+		final int[] blkSize = new int[]{2, 3};
+
+		final DatasetAttributes attrs = new DatasetAttributes(
+				arraySize,
+				shardSize,
+				blkSize,
+				DataType.UINT8,
+				new ShardingCodec(
+						blkSize,
+						new Codec[]{},
+						new DeterministicSizeCodec[]{},
+						IndexLocation.END
+				)
+		);
+
+		List<long[]> blockPositions = attrs.blockPositions().collect(Collectors.toList());
+		final Map<Position, List<long[]>> result = attrs.groupBlockPositions(blockPositions);
+
+		// an 8x12 array with 4x6 shards makes a 2x2 grid of four shards
+		assertEquals(4, result.keySet().size());
+
+		// each 4x6 shard of 2x3 blocks holds 2x2 = four blocks
+		result.values().stream().forEach(x -> assertEquals(4, x.size()));
+	}
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardTest.java b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardTest.java
new file mode 100644
index 00000000..b19e07e5
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/shard/ShardTest.java
@@ -0,0 +1,319 @@
+package org.janelia.saalfeldlab.n5.shard;
+
+import org.janelia.saalfeldlab.n5.ByteArrayDataBlock;
+import org.janelia.saalfeldlab.n5.DataBlock;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.KeyValueAccess;
+import org.janelia.saalfeldlab.n5.N5FSTest;
+import org.janelia.saalfeldlab.n5.N5KeyValueWriter;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.ShardedDatasetAttributes;
+import org.janelia.saalfeldlab.n5.codec.Codec;
+import org.janelia.saalfeldlab.n5.codec.DeterministicSizeCodec;
+import org.janelia.saalfeldlab.n5.codec.N5BlockCodec;
+import org.janelia.saalfeldlab.n5.codec.RawBytes;
+import org.janelia.saalfeldlab.n5.codec.checksum.Crc32cChecksumCodec;
+import org.janelia.saalfeldlab.n5.shard.ShardingCodec.IndexLocation;
+import org.janelia.saalfeldlab.n5.util.GridIterator;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertArrayEquals;
+
+@RunWith(Parameterized.class)
+public class ShardTest {
+
+	private static final N5FSTest tempN5Factory = new N5FSTest();
+
+	@Parameterized.Parameters(name = "IndexLocation({0}), Block ByteOrder({1}), Index ByteOrder({2})")
+	public static Collection<Object[]> data() {
+
+		final ArrayList<Object[]> params = new ArrayList<>();
+		for (IndexLocation indexLoc : IndexLocation.values()) {
+			for (ByteOrder blockByteOrder : new ByteOrder[]{ByteOrder.BIG_ENDIAN, ByteOrder.LITTLE_ENDIAN}) {
+				for (ByteOrder indexByteOrder : new ByteOrder[]{ByteOrder.BIG_ENDIAN, ByteOrder.LITTLE_ENDIAN}) {
+					params.add(new Object[]{indexLoc, blockByteOrder, indexByteOrder});
+				}
+			}
+		}
+		return params;
+	}
+
+	@Parameterized.Parameter()
+	public IndexLocation indexLocation;
+
+	@Parameterized.Parameter(1)
+	public ByteOrder dataByteOrder;
+
+	@Parameterized.Parameter(2)
+	public ByteOrder indexByteOrder;
+
+	@After
+	public void removeTempWriters() {
+
+		tempN5Factory.removeTempWriters();
+	}
+
+	private DatasetAttributes getTestAttributes(long[] dimensions, int[] shardSize, int[] blockSize) {
+
+		return new DatasetAttributes(
+				dimensions,
+				shardSize,
+				blockSize,
+				DataType.UINT8,
+				new ShardingCodec(
+						blockSize,
+						new Codec[]{new N5BlockCodec(dataByteOrder)}, //, new GzipCompression(4)},
+						new DeterministicSizeCodec[]{new RawBytes(indexByteOrder), new Crc32cChecksumCodec()},
+						indexLocation
+				)
+		);
+	}
+
+	private DatasetAttributes getTestAttributes() {
+
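+		// an 8x8 array of 4x4 shards of 2x2 blocks: a 2x2 shard grid, each shard holding 2x2 blocks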
+		return getTestAttributes(new long[]{8, 8}, new int[]{4, 4}, new int[]{2, 2});
+	}
+
+	@Test
+	public void writeReadBlocksTest() {
+
+		final N5Writer writer = tempN5Factory.createTempN5Writer();
+		final DatasetAttributes datasetAttributes = getTestAttributes(
+				new long[]{24, 24},
+				new int[]{8, 8},
+				new int[]{2, 2}
+		);
+
+		writer.createDataset("shard", datasetAttributes);
+
+		final int[] blockSize = datasetAttributes.getBlockSize();
+		final int numElements = blockSize[0] * blockSize[1];
+
+		final byte[] data = new byte[numElements];
+		for (int i = 0; i < data.length; i++) {
+			data[i] = (byte)(110 + i);
+		}
+
+		writer.writeBlocks(
+				"shard",
+				datasetAttributes,
+				/* shard (0, 0) */
+				new ByteArrayDataBlock(blockSize, new long[]{0, 0}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{0, 1}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{1, 0}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{1, 1}, data),
+
+				/* shard (1, 0) */
+				new ByteArrayDataBlock(blockSize, new long[]{4, 0}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{5, 0}, data),
+
+				/* shard (2, 2) */
+				new ByteArrayDataBlock(blockSize, new long[]{11, 11}, data)
+		);
+
+		final KeyValueAccess kva = ((N5KeyValueWriter)writer).getKeyValueAccess();
+
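+		// with 8x8 shards of 2x2 blocks, each shard holds 4x4 blocks, so e.g.
+		// block (11, 11) lands in shard (11/4, 11/4) = (2, 2)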
+		final String[][] keys = new String[][]{
+				{"shard", "0", "0"},
+				{"shard", "1", "0"},
+				{"shard", "2", "2"}
+		};
+		for (String[] key : keys) {
+			final String shard = kva.compose(writer.getURI(), key);
+			Assert.assertTrue("Shard at" + Arrays.toString(key) + "Does not exist", kva.exists(shard));
+		}
+
+		final long[][] blockIndices = new long[][]{{0, 0}, {0, 1}, {1, 0}, {1, 1}, {4, 0}, {5, 0}, {11, 11}};
+		for (long[] blockIndex : blockIndices) {
+			final DataBlock<?> block = writer.readBlock("shard", datasetAttributes, blockIndex);
+			Assert.assertArrayEquals("Read from shard doesn't match", data, (byte[])block.getData());
+		}
+
+		final byte[] data2 = new byte[numElements];
+		for (int i = 0; i < data2.length; i++) {
+			data2[i] = (byte)(10 + i);
+		}
+		writer.writeBlocks(
+				"shard",
+				datasetAttributes,
+				/* shard (0, 0) */
+				new ByteArrayDataBlock(blockSize, new long[]{0, 0}, data2),
+				new ByteArrayDataBlock(blockSize, new long[]{1, 1}, data2),
+
+				/* shard (0, 1) */
+				new ByteArrayDataBlock(blockSize, new long[]{0, 4}, data2),
+				new ByteArrayDataBlock(blockSize, new long[]{0, 5}, data2),
+
+				/* shard (2, 2) */
+				new ByteArrayDataBlock(blockSize, new long[]{10, 10}, data2)
+		);
+
+		final String[][] keys2 = new String[][]{
+				{"shard", "0", "0"},
+				{"shard", "1", "0"},
+				{"shard", "0", "1"},
+				{"shard", "2", "2"}
+		};
+		for (String[] key : keys2) {
+			final String shard = kva.compose(writer.getURI(), key);
+			Assert.assertTrue("Shard at" + Arrays.toString(key) + "Does not exist", kva.exists(shard));
+		}
+
+		final long[][] oldBlockIndices = new long[][]{{0, 1}, {1, 0}, {4, 0}, {5, 0}, {11, 11}};
+		for (long[] blockIndex : oldBlockIndices) {
+			final DataBlock<?> block = writer.readBlock("shard", datasetAttributes, blockIndex);
+			Assert.assertArrayEquals("Read from shard doesn't match", data, (byte[])block.getData());
+		}
+
+		final long[][] newBlockIndices = new long[][]{{0, 0}, {1, 1}, {0, 4}, {0, 5}, {10, 10}};
+		for (long[] blockIndex : newBlockIndices) {
+			final DataBlock<?> block = writer.readBlock("shard", datasetAttributes, blockIndex);
+			Assert.assertArrayEquals("Read from shard doesn't match", data2, (byte[])block.getData());
+		}
+	}
+
+	@Test
+	public void writeReadBlockTest() {
+
+		final N5Writer writer = tempN5Factory.createTempN5Writer();
+		final DatasetAttributes datasetAttributes = getTestAttributes();
+
+		writer.createDataset("shard", datasetAttributes);
+		writer.deleteBlock("shard", 0, 0);
+
+		final int[] blockSize = datasetAttributes.getBlockSize();
+		final DataType dataType = datasetAttributes.getDataType();
+		final int numElements = 2 * 2;
+
+		final HashMap<long[], byte[]> writtenBlocks = new HashMap<>();
+
+		for (int idx1 = 1; idx1 >= 0; idx1--) {
+			for (int idx2 = 1; idx2 >= 0; idx2--) {
+				final long[] gridPosition = {idx1, idx2};
+				final DataBlock<?> dataBlock = dataType.createDataBlock(blockSize, gridPosition, numElements);
+				byte[] data = (byte[])dataBlock.getData();
+				for (int i = 0; i < data.length; i++) {
+					data[i] = (byte)((idx1 * 100) + (idx2 * 10) + i);
+				}
+				writer.writeBlock("shard", datasetAttributes, dataBlock);
+
+				final DataBlock<?> block = writer.readBlock("shard", datasetAttributes, gridPosition.clone());
+				Assert.assertArrayEquals("Read from shard doesn't match", data, (byte[])block.getData());
+
+				for (Map.Entry<long[], byte[]> entry : writtenBlocks.entrySet()) {
+					final long[] otherGridPosition = entry.getKey();
+					final byte[] otherData = entry.getValue();
+					final DataBlock<?> otherBlock = writer.readBlock("shard", datasetAttributes, otherGridPosition);
+					Assert.assertArrayEquals("Read prior write from shard no loner matches", otherData, (byte[])otherBlock.getData());
+				}
+
+				writtenBlocks.put(gridPosition, data);
+			}
+		}
+	}
+
+	@Test
+	public void writeReadShardTest() {
+
+		final N5Writer writer = tempN5Factory.createTempN5Writer();
+
+		final DatasetAttributes datasetAttributes = getTestAttributes();
+		writer.createDataset("wholeShard", datasetAttributes);
+		writer.deleteBlock("wholeShard", 0, 0);
+
+		final int[] blockSize = datasetAttributes.getBlockSize();
+		final DataType dataType = datasetAttributes.getDataType();
+		final int numElements = 2 * 2;
+
+		final HashMap<long[], byte[]> writtenBlocks = new HashMap<>();
+
+		final InMemoryShard<byte[]> shard = new InMemoryShard<>(datasetAttributes, new long[]{0, 0});
+
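+		// accumulate all four blocks in memory first; the complete shard is written in a single call below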
+		for (int idx1 = 1; idx1 >= 0; idx1--) {
+			for (int idx2 = 1; idx2 >= 0; idx2--) {
+				final long[] gridPosition = {idx1, idx2};
+				final DataBlock<?> dataBlock = dataType.createDataBlock(blockSize, gridPosition, numElements);
+				byte[] data = (byte[])dataBlock.getData();
+				for (int i = 0; i < data.length; i++) {
+					data[i] = (byte)((idx1 * 100) + (idx2 * 10) + i);
+				}
+				shard.addBlock((DataBlock<byte[]>)dataBlock);
+				writtenBlocks.put(gridPosition, data);
+			}
+		}
+
+		writer.writeShard("wholeShard", datasetAttributes, shard);
+
+		for (Map.Entry<long[], byte[]> entry : writtenBlocks.entrySet()) {
+			final long[] otherGridPosition = entry.getKey();
+			final byte[] otherData = entry.getValue();
+			final DataBlock<?> otherBlock = writer.readBlock("wholeShard", datasetAttributes, otherGridPosition);
+			Assert.assertArrayEquals("Read prior write from shard no loner matches", otherData, (byte[])otherBlock.getData());
+		}
+	}
+
+	@Test
+	@Ignore
+	public void writeReadNestedShards() {
+
+		int[] blockSize = new int[]{4, 4};
+		int N = Arrays.stream(blockSize).reduce(1, (x, y) -> x * y);
+
+		final N5Writer writer = tempN5Factory.createTempN5Writer();
+		final DatasetAttributes datasetAttributes = getNestedShardCodecsAttributes(blockSize);
+		writer.createDataset("nestedShards", datasetAttributes);
+
+		final byte[] data = new byte[N];
+		Arrays.fill(data, (byte)4);
+
+		writer.writeBlocks("nestedShards", datasetAttributes,
+				new ByteArrayDataBlock(blockSize, new long[]{1, 1}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{0, 2}, data),
+				new ByteArrayDataBlock(blockSize, new long[]{2, 1}, data));
+
+		assertArrayEquals(data, (byte[])writer.readBlock("nestedShards", datasetAttributes, 1, 1).getData());
+		assertArrayEquals(data, (byte[])writer.readBlock("nestedShards", datasetAttributes, 0, 2).getData());
+		assertArrayEquals(data, (byte[])writer.readBlock("nestedShards", datasetAttributes, 2, 1).getData());
+	}
+
+	private DatasetAttributes getNestedShardCodecsAttributes(int[] blockSize) {
+
+		final int[] innerShardSize = new int[]{2 * blockSize[0], 2 * blockSize[1]};
+		final int[] shardSize = new int[]{4 * blockSize[0], 4 * blockSize[1]};
+		final long[] dimensions = GridIterator.int2long(shardSize);
+
+		// TODO: it's not even clear how to build this given this constructor.
+		// 	Is the block size of the sharded dataset attributes the innermost
+		// 	(block) size or the intermediate shard size? Probably better to
+		// 	forget about this class - only use DatasetAttributes and detect
+		// 	sharding in another way.
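+		// Assumed geometry for blockSize {4, 4}: inner shards are 8x8 (2x2 blocks),
+		// the outer shard is 16x16 (2x2 inner shards), and the dataset spans
+		// exactly one outer shard.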
+		final ShardingCodec innerShard = new ShardingCodec(innerShardSize,
+				new Codec[]{new RawBytes()},
+				new DeterministicSizeCodec[]{new RawBytes(indexByteOrder), new Crc32cChecksumCodec()},
+				IndexLocation.START);
+
+		return new DatasetAttributes(
+				dimensions, shardSize, blockSize, DataType.UINT8,
+				new ShardingCodec(
+						blockSize,
+						new Codec[]{innerShard},
+						new DeterministicSizeCodec[]{new RawBytes(indexByteOrder), new Crc32cChecksumCodec()},
+						IndexLocation.END)
+				);
+	}
+}
diff --git a/src/test/resources/shardExamples/test.n5/attributes.json b/src/test/resources/shardExamples/test.n5/attributes.json
new file mode 100644
index 00000000..573b0188
--- /dev/null
+++ b/src/test/resources/shardExamples/test.n5/attributes.json
@@ -0,0 +1,3 @@
+{
+  "n5": "4.0.0"
+}
\ No newline at end of file
diff --git a/src/test/resources/shardExamples/test.n5/mid_sharded/attributes.json b/src/test/resources/shardExamples/test.n5/mid_sharded/attributes.json
new file mode 100644
index 00000000..b9e575b2
--- /dev/null
+++ b/src/test/resources/shardExamples/test.n5/mid_sharded/attributes.json
@@ -0,0 +1,30 @@
+{
+  "codecs": [
+    {
+      "name": "sharding_indexed",
+      "configuration": {
+        "chunk_shape": [
+          2,
+          3
+        ],
+        "codecs": [
+          {
+            "name": "bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_codecs": [
+          {
+            "name": "bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_location": "end"
+      }
+    }
+  ]
+}
diff --git a/src/test/resources/shardExamples/test.zarr/mid_sharded/attributes.json b/src/test/resources/shardExamples/test.zarr/mid_sharded/attributes.json
new file mode 100644
index 00000000..920dff92
--- /dev/null
+++ b/src/test/resources/shardExamples/test.zarr/mid_sharded/attributes.json
@@ -0,0 +1,54 @@
+{
+  "shape": [
+    4,
+    6
+  ],
+  "fill_value": 0,
+  "chunk_grid": {
+    "name": "regular",
+    "configuration": {
+      "chunk_shape": [
+        4,
+        6
+      ]
+    }
+  },
+  "attributes": {},
+  "zarr_format": 3,
+  "data_type": "uint8",
+  "chunk_key_encoding": {
+    "name": "default",
+    "configuration": {
+      "separator": "/"
+    }
+  },
+  "codecs": [
+    {
+      "name": "sharding_indexed",
+      "configuration": {
+        "chunk_shape": [
+          2,
+          3
+        ],
+        "codecs": [
+          {
+            "name": "n5bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_codecs": [
+          {
+            "name": "bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_location": "end"
+      }
+    }
+  ],
+  "node_type": "array"
+}
diff --git a/src/test/resources/shardExamples/test.zarr/mid_sharded/c/0/0 b/src/test/resources/shardExamples/test.zarr/mid_sharded/c/0/0
new file mode 100644
index 00000000..19ad91e1
Binary files /dev/null and b/src/test/resources/shardExamples/test.zarr/mid_sharded/c/0/0 differ
diff --git a/src/test/resources/shardExamples/test.zarr/mid_sharded/zarr.json b/src/test/resources/shardExamples/test.zarr/mid_sharded/zarr.json
new file mode 100644
index 00000000..a80cb9d9
--- /dev/null
+++ b/src/test/resources/shardExamples/test.zarr/mid_sharded/zarr.json
@@ -0,0 +1,54 @@
+{
+  "shape": [
+    4,
+    6
+  ],
+  "fill_value": 0,
+  "chunk_grid": {
+    "name": "regular",
+    "configuration": {
+      "chunk_shape": [
+        4,
+        6
+      ]
+    }
+  },
+  "attributes": {},
+  "zarr_format": 3,
+  "data_type": "uint8",
+  "chunk_key_encoding": {
+    "name": "default",
+    "configuration": {
+      "separator": "/"
+    }
+  },
+  "codecs": [
+    {
+      "name": "sharding_indexed",
+      "configuration": {
+        "chunk_shape": [
+          2,
+          3
+        ],
+        "codecs": [
+          {
+            "name": "bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_codecs": [
+          {
+            "name": "bytes",
+            "configuration": {
+              "endian": "little"
+            }
+          }
+        ],
+        "index_location": "end"
+      }
+    }
+  ],
+  "node_type": "array"
+}