Skip to content

Commit

Permalink
fix: don't use platform paths for groups
Browse files Browse the repository at this point in the history
On Windows, `\` is interpreted as an escape character when normalizing the group path, so the separator is removed.
  • Loading branch information
cmhulbert committed Sep 1, 2023
1 parent bc0da63 commit 606731c
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,7 @@
import org.janelia.saalfeldlab.labels.blocks.LabelBlockLookupKey;
import org.janelia.saalfeldlab.labels.blocks.n5.IsRelativeToContainer;
import org.janelia.saalfeldlab.labels.downsample.WinnerTakesAll;
import org.janelia.saalfeldlab.n5.ByteArrayDataBlock;
import org.janelia.saalfeldlab.n5.DataBlock;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.LongArrayDataBlock;
import org.janelia.saalfeldlab.n5.N5Reader;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.*;
import org.janelia.saalfeldlab.n5.imglib2.N5LabelMultisets;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import org.janelia.saalfeldlab.paintera.data.mask.persist.PersistCanvas;
Expand All @@ -57,7 +52,6 @@

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
Expand Down Expand Up @@ -136,7 +130,8 @@ public void updateLabelBlockLookup(final List<TLongObjectMap<BlockDiff>> blockDi

LOG.debug("Found scale datasets {}", (Object)scaleUniqueLabels);
for (int level = 0; level < scaleUniqueLabels.length; ++level) {
final DatasetSpec datasetUniqueLabels = DatasetSpec.of(n5Writer, Paths.get(uniqueLabelsPath, scaleUniqueLabels[level]).toString());
final String uniqueLabelScalePath = N5URI.normalizeGroupPath(uniqueLabelsPath + n5Writer.getGroupSeparator() + scaleUniqueLabels[level]);
final DatasetSpec datasetUniqueLabels = DatasetSpec.of(n5Writer, uniqueLabelScalePath);
final TLongObjectMap<TLongHashSet> removedById = new TLongObjectHashMap<>();
final TLongObjectMap<TLongHashSet> addedById = new TLongObjectHashMap<>();
final TLongObjectMap<BlockDiff> blockDiffs = blockDiffsByLevel.get(level);
Expand Down Expand Up @@ -262,8 +257,8 @@ public List<TLongObjectMap<BlockDiff>> persistCanvas(final CachedCellImg<Unsigne

final TLongObjectHashMap<BlockDiff> blockDiffsAt = new TLongObjectHashMap<>();
blockDiffs.add(blockDiffsAt);
final DatasetSpec targetDataset = DatasetSpec.of(n5Writer, Paths.get(dataset, scaleDatasets[level]).toString());
final DatasetSpec previousDataset = DatasetSpec.of(n5Writer, Paths.get(dataset, scaleDatasets[level - 1]).toString());
final DatasetSpec targetDataset = DatasetSpec.of(n5Writer, N5URI.normalizeGroupPath(dataset + n5Writer.getGroupSeparator() + scaleDatasets[level]));
final DatasetSpec previousDataset = DatasetSpec.of(n5Writer, N5URI.normalizeGroupPath(dataset + n5Writer.getGroupSeparator() + scaleDatasets[level - 1]));

final double[] targetDownsamplingFactors = N5Helpers.getDownsamplingFactors(n5Writer, targetDataset.dataset);
final double[] previousDownsamplingFactors = N5Helpers.getDownsamplingFactors(n5Writer, previousDataset.dataset);
Expand Down
8 changes: 4 additions & 4 deletions src/main/java/org/janelia/saalfeldlab/util/n5/N5Data.java
Original file line number Diff line number Diff line change
Expand Up @@ -380,7 +380,7 @@ ImagesWithTransform<T, V>[] openRawMultiscale(

final double[] initialDonwsamplingFactors = N5Helpers.getDownsamplingFactors(
reader,
Paths.get(dataset, scaleDatasets[0]).toString()
N5URI.normalizeGroupPath(dataset + reader.getGroupSeparator() + scaleDatasets[0])
);
LOG.debug("Initial transform={}", transform);
final ExecutorService es = Executors.newFixedThreadPool(
Expand All @@ -393,7 +393,7 @@ ImagesWithTransform<T, V>[] openRawMultiscale(
final int fScale = scale;
futures.add(es.submit(ThrowingSupplier.unchecked(() -> {
LOG.debug("Populating scale level {}", fScale);
final String scaleDataset = Paths.get(dataset, scaleDatasets[fScale]).toString();
final String scaleDataset = N5URI.normalizeGroupPath(dataset + reader.getGroupSeparator() + scaleDatasets[fScale]);
imagesWithInvalidate[fScale] = openRaw(reader, scaleDataset, transform.copy(), queue, priority);
final double[] downsamplingFactors = N5Helpers.getDownsamplingFactors(reader, scaleDataset);
LOG.debug("Read downsampling factors: {}", Arrays.toString(downsamplingFactors));
Expand Down Expand Up @@ -685,7 +685,7 @@ public static ImagesWithTransform<LabelMultisetType, VolatileLabelMultisetType>[

final double[] initialDonwsamplingFactors = N5Helpers.getDownsamplingFactors(
reader,
Paths.get(dataset, scaleDatasets[0]).toString());
N5URI.normalizeGroupPath(dataset + reader.getGroupSeparator() + scaleDatasets[0]));
final ExecutorService es = Executors.newFixedThreadPool(
scaleDatasets.length,
new NamedThreadFactory("populate-mipmap-scales-%d", true));
Expand All @@ -695,7 +695,7 @@ public static ImagesWithTransform<LabelMultisetType, VolatileLabelMultisetType>[
final int fScale = scale;
futures.add(es.submit(ThrowingSupplier.unchecked(() -> {
LOG.debug("Populating scale level {}", fScale);
final String scaleDataset = Paths.get(dataset, scaleDatasets[fScale]).toString();
final String scaleDataset = N5URI.normalizeGroupPath(dataset + reader.getGroupSeparator() + scaleDatasets[fScale]);
imagesWithInvalidate[fScale] = openLabelMultiset(reader, scaleDataset, transform.copy(), queue, priority);
final double[] downsamplingFactors = N5Helpers.getDownsamplingFactors(reader, scaleDataset);
LOG.debug("Read downsampling factors: {}", Arrays.toString(downsamplingFactors));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -352,10 +352,11 @@ private static <T> void testPainteraData(
testCanvasPersistance(container, dataset, s0, canvas, openLabels, asserts);

// test highest level block lookups
final String uniqueBlock0 = String.join("/", dataset, "unique-labels", "s0");
final String groupSeparator = container.getReader().getGroupSeparator();
final String uniqueBlock0Group = N5URI.normalizeGroupPath(String.join(groupSeparator, dataset, "unique-labels","s0"));
final Path mappingPattern = Paths.get(container.getUri().getPath(), dataset, "label-to-block-mapping", "s%d", "%d");
final Path mapping0 = Paths.get(container.getUri().getPath(), dataset, "label-to-block-mapping", "s0");
final DatasetAttributes uniqueBlockAttributes = writer.getDatasetAttributes(uniqueBlock0);
final DatasetAttributes uniqueBlockAttributes = writer.getDatasetAttributes(uniqueBlock0Group);
final List<Interval> blocks = Grids.collectAllContainedIntervals(dims, blockSize);
final TLongObjectMap<TLongSet> labelToBLockMapping = new TLongObjectHashMap<>();
for (final Interval block : blocks) {
Expand All @@ -375,7 +376,7 @@ private static <T> void testPainteraData(
}
});

final DataBlock<?> uniqueBlock = writer.readBlock(uniqueBlock0, uniqueBlockAttributes, blockPos);
final DataBlock<?> uniqueBlock = writer.readBlock(uniqueBlock0Group, uniqueBlockAttributes, blockPos);
Assert.assertEquals(labels, new TLongHashSet((long[])uniqueBlock.getData()));
}

Expand Down

0 comments on commit 606731c

Please sign in to comment.