Commit
Merge branch 'maint-5.x' into maint-5.x_sigmet
michaeldiener committed Mar 11, 2024
2 parents 14e430b + a2c7146 commit 343faf1
Showing 8 changed files with 213 additions and 12 deletions.
22 changes: 18 additions & 4 deletions cdm/core/src/main/java/ucar/nc2/NetcdfFiles.java
@@ -498,13 +498,20 @@ private static String findCompressedSuffix(String filename) {
private static String makeUncompressed(String filename) throws Exception {
String suffix = findCompressedSuffix(filename);
int pos = filename.lastIndexOf(suffix);
log.debug("suffix {}, pos {}", suffix, pos);

String basepath = filename.substring(0, pos - 1);
String itempath = filename.substring(pos + suffix.length());
// rebuild filepath without suffix (same as base path if there is no item path)
String uncompressedFilename = basepath + itempath;
// name of parent file
String baseFilename = basepath + "." + suffix;

log.debug("basepath '{}'", basepath);
log.debug("itempath '{}'", itempath);
log.debug("uncompressedFilename '{}'", uncompressedFilename);
log.debug("baseFilename '{}'", baseFilename);

// coverity claims resource leak, but attempts to fix it break things, so beware
// see if already decompressed, check in cache as needed
File uncompressedFile = DiskCache.getFileStandardPolicy(uncompressedFilename);
@@ -571,18 +578,25 @@ private static String makeUncompressed(String filename) throws Exception {
try (InputStream in = new UncompressInputStream(new FileInputStream(baseFilename))) {
copy(in, fout, 100000);
}
-      if (NetcdfFile.debugCompress)
+      if (NetcdfFile.debugCompress) {
        log.info("uncompressed {} to {}", filename, uncompressedFile);
+      }

} else if (suffix.equalsIgnoreCase("zip")) {
// find specified zip entry, if it exists
try (ZipInputStream zin = new ZipInputStream(new FileInputStream(baseFilename))) {
+        // If a desired zipentry ID was appended to the method's filename parameter, then itempath
+        // has length > 1 and the ID starts at itempath char offset 1.
+        String itemName = (itempath.length() > 1) ? itempath.substring(1) : "";
+        log.debug("seeking zip itempath '{}' itemName '{}'", itempath, itemName);
        ZipEntry ze = zin.getNextEntry();
-        String itemName = itempath.substring(1); // remove initial /

while (ze != null) {
if (itempath.isEmpty() || ze.getName().equals(itemName)) {
copy(zin, fout, 100000);
-            if (NetcdfFile.debugCompress)
-              log.info("unzipped {} entry {} to {}", filename, ze.getName(), uncompressedFile);
+            if (NetcdfFile.debugCompress) {
+              log.debug("unzipped {} entry {} to {}", filename, ze.getName(), uncompressedFile);
+            }
break;
}
zin.closeEntry();
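The itemName guard above is the functional fix: when no zip entry is appended to the path, itempath is empty and the old itempath.substring(1) would throw StringIndexOutOfBoundsException. Here is a minimal, self-contained sketch of the path-splitting convention; the input path and class name are hypothetical, and only the splitting logic mirrors the diff above.

public class ZipPathSplitSketch {
  public static void main(String[] args) {
    String filename = "data/stuff.zip/inner.nc"; // hypothetical: archive path plus optional entry
    String suffix = "zip"; // what findCompressedSuffix(filename) would report
    int pos = filename.lastIndexOf(suffix);
    String basepath = filename.substring(0, pos - 1); // "data/stuff"
    String itempath = filename.substring(pos + suffix.length()); // "/inner.nc"
    String uncompressedFilename = basepath + itempath; // "data/stuff/inner.nc"
    String baseFilename = basepath + "." + suffix; // "data/stuff.zip"
    // The new guard: an empty itempath (no entry requested) now yields an empty
    // itemName instead of an exception from itempath.substring(1).
    String itemName = (itempath.length() > 1) ? itempath.substring(1) : "";
    System.out.println(baseFilename + " -> " + uncompressedFilename + ", entry '" + itemName + "'");
  }
}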
3 changes: 1 addition & 2 deletions cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java
@@ -1657,8 +1657,7 @@ private Builder<?> addLocalFieldsToBuilder(Builder<? extends Builder<?>> b) {
this.coordSys.forEach(sys -> b.coords.addCoordinateSystem(sys.toBuilder()));
this.coordTransforms.forEach(trans -> b.coords.addCoordinateTransform(trans.toBuilder()));

-    b.setOrgFile(this.orgFile).setConventionUsed(this.convUsed).setEnhanceMode(this.enhanceMode)
-        .setAggregation(this.agg);
+    b.setOrgFile(this).setConventionUsed(this.convUsed).setEnhanceMode(this.enhanceMode).setAggregation(this.agg);

return (Builder<?>) super.addLocalFieldsToBuilder(b);
}
@@ -133,6 +133,10 @@ public CoverageCoordAxisBuilder setDependsOn(String dependsOn) {
// for point: values are the points, values[npts]
// for intervals: values are the edges, values[2*npts]: low0, high0, low1, high1

public void setMissingTolerance(double tolerance) {
missingTolerance = tolerance;
}

public void setSpacingFromValues(boolean isInterval) {
if (isInterval) {
setSpacingFromIntervalValues();
@@ -227,7 +231,7 @@ private void setSpacingFromIntervalValues() {
}
}

-  private static final double missingTolerance = .05;
+  private double missingTolerance = .05;

private boolean isRegular(Counters.Counter resol) {
if (resol.getUnique() == 1) {
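Turning missingTolerance into an instance field, together with the new setMissingTolerance above, lets individual axes tighten the regularity check; the GRIB change at the end of this commit sets it to 0.0 for runtime aux coordinates. The sketch below illustrates the tolerance semantics under the assumption that isRegular() compares the off-mode fraction of resolution counts against the tolerance; it is not the library's actual Counters-based code.

import java.util.Map;

public class ToleranceSketch {
  // Spacing counts as "regular" when at most a missingTolerance fraction of the
  // resolution samples disagree with the most common one; 0.0 demands exact regularity.
  static boolean isRegular(Map<Double, Integer> resolutionCounts, double missingTolerance) {
    if (resolutionCounts.size() == 1) {
      return true; // a single unique spacing is trivially regular
    }
    int total = resolutionCounts.values().stream().mapToInt(Integer::intValue).sum();
    int mode = resolutionCounts.values().stream().mapToInt(Integer::intValue).max().orElse(0);
    return (total - mode) / (double) total <= missingTolerance;
  }

  public static void main(String[] args) {
    Map<Double, Integer> counts = Map.of(1.0, 98, 2.0, 2); // 2% irregular spacing
    System.out.println(isRegular(counts, .05)); // true with the .05 default
    System.out.println(isRegular(counts, 0.0)); // false once the tolerance is 0.0
  }
}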
26 changes: 23 additions & 3 deletions cdm/core/src/main/java/ucar/nc2/util/cache/FileCache.java
@@ -109,6 +109,7 @@ private static void schedule(TimerTask task, long delay) {
protected String name;
protected final int softLimit, minElements, hardLimit;
protected final long period; // msecs
private boolean removeDeleted = false;

private final AtomicBoolean disabled = new AtomicBoolean(false); // cache is disabled
protected final AtomicBoolean hasScheduled = new AtomicBoolean(false); // a cleanup is scheduled
@@ -147,6 +148,23 @@ public FileCache(int minElementsInMemory, int softLimit, int hardLimit, int period) {
this("", minElementsInMemory, softLimit, hardLimit, period);
}

/**
* Constructor.
*
* @param name of file cache
* @param minElementsInMemory keep this number in the cache
* @param softLimit trigger a cleanup if it goes over this number.
* @param hardLimit if > 0, never allow more than this many elements. This causes a cleanup to be done in the calling
* thread.
* @param period if > 0, do periodic cleanups every this number of seconds.
* @param removeDeleted if true, then remove deleted files from the cache when a cleanup is performed.
*/
public FileCache(String name, int minElementsInMemory, int softLimit, int hardLimit, int period,
boolean removeDeleted) {
this(name, minElementsInMemory, softLimit, hardLimit, period);
this.removeDeleted = removeDeleted;
}

/**
* Constructor.
*
@@ -699,9 +717,11 @@ public int compareTo(Tracker o) {
synchronized void cleanup(int maxElements) {

try {
-      for (CacheElement.CacheFile cacheFile : files.values()) {
-        if (!Files.exists(Paths.get(cacheFile.ncfile.getLocation()))) {
-          remove(cacheFile);
+      if (removeDeleted) {
+        for (CacheElement.CacheFile cacheFile : files.values()) {
+          if (!Files.exists(Paths.get(cacheFile.ncfile.getLocation()))) {
+            remove(cacheFile);
+          }
+        }
      }
}

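The new constructor plugs removeDeleted into the cleanup pass shown above, so evicting entries whose backing file has disappeared is now opt-in rather than unconditional. A short usage sketch; the parameters are copied from the TestRandomAccessFileCacheCleanup change below, and the wrapper class is illustrative.

import ucar.nc2.util.cache.FileCache;
import ucar.unidata.io.RandomAccessFile;

public class RemoveDeletedCacheExample {
  public static void main(String[] args) {
    // name, minElementsInMemory, softLimit, hardLimit, period (s), removeDeleted
    FileCache cache = new FileCache("RandomAccessFile", 0, 1, 1, 0, true);
    RandomAccessFile.setGlobalFileCache(cache);
    // Each cleanup pass will now also drop cache entries whose underlying
    // file no longer exists on disk; leave the flag false to keep them.
  }
}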
85 changes: 85 additions & 0 deletions cdm/core/src/test/java/ucar/nc2/dataset/TestNetcdfFileCache.java
@@ -0,0 +1,85 @@
package ucar.nc2.dataset;

import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import java.io.IOException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import ucar.nc2.NetcdfFile;
import ucar.nc2.ncml.TestNcmlRead;
import ucar.nc2.util.cache.FileCacheIF;
import ucar.unidata.util.test.TestDir;

public class TestNetcdfFileCache {
@BeforeClass
public static void setupCaches() {
NetcdfDatasets.initNetcdfFileCache(1, 10, 15, -1);
}

@AfterClass
public static void shutdownCaches() {
NetcdfDatasets.shutdown();
}

@After
public void cleanupAfterEach() {
NetcdfDatasets.getNetcdfFileCache().clearCache(true);
}

@Test
public void shouldReleaseLockOnNetcdfFileUsingBuilder() throws IOException {
final String filename = "file:./" + TestDir.cdmLocalTestDataDir + "jan.nc";
final DatasetUrl durl = DatasetUrl.findDatasetUrl(filename);

final NetcdfFile netcdfFile = NetcdfDatasets.acquireFile(durl, null);
final NetcdfDataset netcdfDatasetFromBuilder = NetcdfDataset.builder(netcdfFile).build();
// Closing the builder NetcdfDataset should close the original NetcdfFile acquired from cache
netcdfDatasetFromBuilder.close();

assertNoFilesAreLocked();
}

@Test
public void shouldReleaseLockOnNetcdfDatasetUsingBuilder() throws IOException {
final String filename = "file:./" + TestDir.cdmLocalTestDataDir + "jan.nc";
final DatasetUrl durl = DatasetUrl.findDatasetUrl(filename);

final NetcdfDataset netcdfDataset = NetcdfDatasets.acquireDataset(durl, null);
final NetcdfDataset netcdfDatasetFromBuilder = netcdfDataset.toBuilder().build();
// Closing the builder NetcdfDataset should close the original NetcdfDataset acquired from cache
netcdfDatasetFromBuilder.close();

assertNoFilesAreLocked();
}

@Test
public void shouldReleaseLockOnDataset() throws IOException {
final String filename = "file:./" + TestDir.cdmLocalTestDataDir + "jan.nc";
assertLockIsReleasedOnDataset(filename);
}

@Test
public void shouldReleaseLockOnAggregation() throws IOException {
final String filename = "file:./" + TestNcmlRead.topDir + "aggExisting.xml";
assertLockIsReleasedOnDataset(filename);
}

private static void assertLockIsReleasedOnDataset(String filename) throws IOException {
final DatasetUrl durl = DatasetUrl.findDatasetUrl(filename);
final NetcdfFile netcdfFile = NetcdfDatasets.acquireFile(durl, null);
final NetcdfDataset netcdfDataset = NetcdfDatasets.enhance(netcdfFile, NetcdfDataset.getDefaultEnhanceMode(), null);
// Closing the netcdf dataset should close the "wrapped" NetcdfFile acquired from cache
netcdfDataset.close();

assertNoFilesAreLocked();
}

private static void assertNoFilesAreLocked() {
FileCacheIF cache = NetcdfDatasets.getNetcdfFileCache();
boolean isAnyFileLocked = cache.showCache().stream().anyMatch(entry -> entry.startsWith("true"));
assertWithMessage(cache.showCache().toString()).that(isAnyFileLocked).isFalse();
}
}
@@ -21,7 +21,7 @@ public class TestRandomAccessFileCacheCleanup {
@BeforeClass
public static void enableCache() {
RandomAccessFile.shutdown();
-    cache = new FileCache("RandomAccessFile", 0, 1, 1, 0);
+    cache = new FileCache("RandomAccessFile", 0, 1, 1, 0, true);
RandomAccessFile.setGlobalFileCache(cache);
assertThat(cache.showCache().size()).isEqualTo(0);
}
80 changes: 79 additions & 1 deletion cdm/s3/src/test/java/thredds/inventory/s3/TestMFileS3.java
@@ -11,14 +11,18 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.services.s3.model.NoSuchKeyException;
import thredds.filesystem.MFileOS;
import thredds.inventory.MFile;
import thredds.inventory.s3.MFileS3.Provider;
import ucar.unidata.io.s3.CdmS3Uri;
import ucar.unidata.io.s3.S3TestsCommon;
import ucar.unidata.util.test.category.NotPullRequest;

@@ -120,6 +124,7 @@ public void bucketAndKeyOsdc() throws IOException {
public void dirCheckAws() throws IOException {
dirCheckNoDelim(AWS_G16_S3_URI_DIR, G16_DIR);
dirCheckDelim(AWS_G16_S3_URI_DIR + DELIMITER_FRAGMENT);
dirCheckDelim(AWS_G16_S3_URI_DIR + "/" + DELIMITER_FRAGMENT);
}

@Test
@@ -143,6 +148,20 @@ public void shouldReturnTopLevelKeyName() throws IOException {
assertThat(fileWithDelimiter.getName()).isEqualTo(topLevelDir);
}

@Test
public void shouldCompareSameMFile() throws IOException {
final MFile mFile = new MFileS3(AWS_G16_S3_OBJECT_1);
assertThat(mFile.equals(mFile)).isTrue();
assertThat(mFile.compareTo(mFile)).isEqualTo(0);
}

@Test
public void shouldCompareToDifferentClass() throws IOException {
final MFile mFile1 = new MFileS3(AWS_G16_S3_OBJECT_1);
final MFile mFile2 = new MFileOS("test");
assertThat(mFile1.equals(mFile2)).isFalse();
}

@Test
public void compareMFilesAws() throws IOException {
for (String delimiter : DELIMITER_FRAGMENTS) {
@@ -345,6 +364,60 @@ public void shouldGetInputStream() throws IOException {
}
}


@Test
public void shouldGetLastModifiedForExistingFile() throws IOException {
final MFile mFile = new MFileS3(AWS_G16_S3_OBJECT_1);
assertThat(mFile.getLastModified()).isGreaterThan(0);

final MFile mFile2 = new MFileS3(AWS_G16_S3_OBJECT_1, 0, -1);
assertThat(mFile2.getLastModified()).isGreaterThan(0);

final MFile mFile3 = new MFileS3(AWS_G16_S3_OBJECT_1, 0, 1);
assertThat(mFile3.getLastModified()).isEqualTo(1);
}

@Test
public void shouldThrowForGetLastModifiedOnNonExistingFile() throws IOException {
final MFile mFile = new MFileS3(AWS_G16_S3_URI_DIR + "/NotARealKey");
assertThrows(NoSuchKeyException.class, mFile::getLastModified);
}

@Test
public void shouldGetLengthForExistingFile() throws IOException {
final MFile mFile = new MFileS3(AWS_G16_S3_OBJECT_1);
assertThat(mFile.getLength()).isGreaterThan(0);

final MFile mFile2 = new MFileS3(AWS_G16_S3_OBJECT_1, -1, 0);
assertThat(mFile2.getLength()).isGreaterThan(0);

final MFile mFile3 = new MFileS3(AWS_G16_S3_OBJECT_1, 1, 0);
assertThat(mFile3.getLength()).isEqualTo(1);
}

@Test
public void shouldThrowForGetLengthOnNonExistingFile() throws IOException {
final MFile mFile = new MFileS3(AWS_G16_S3_URI_DIR + "/NotARealKey");
assertThrows(NoSuchKeyException.class, mFile::getLength);
}

@Test
public void shouldGetProtocol() {
assertThat(new Provider().getProtocol()).isEqualTo("cdms3");
}

@Test
public void shouldCreateMFile() throws IOException {
final MFile mFile = new Provider().create(AWS_G16_S3_OBJECT_1);
assertThat(mFile.exists()).isTrue();
}

@Test
public void shouldCreateMFileUsingCdms3Uri() throws URISyntaxException {
final MFile mFile = new MFileS3(new CdmS3Uri(AWS_G16_S3_OBJECT_1));
assertThat(mFile.exists()).isTrue();
}

private void checkWithBucket(String cdmS3Uri) throws IOException {
logger.info("Checking {}", cdmS3Uri);
MFile mFile = new MFileS3(cdmS3Uri);
@@ -393,7 +466,8 @@ private void dirCheckDelim(String cdmS3Uri) throws IOException {
MFile parent = mFile.getParent();
// Since we have a delimiter, and the object key contains the delimiter, we know this should not be null.
assertThat(parent).isNotNull();
-    assertThat(parent.getPath()).isEqualTo(cdmS3Uri.replace("/" + dirName, "/"));
+    assertThat(parent.getPath())
+        .isEqualTo(cdmS3Uri.replace("/" + dirName, "/").replace(parentDirName + "//", parentDirName + "/"));
assertThat(parent.getName()).isEqualTo(parentDirName);
assertThat(parent.isDirectory()).isTrue();
}
@@ -404,8 +478,12 @@ private void compareS3Mfiles(String uri1, String uri2) throws IOException {
MFile mFile3 = new MFileS3(uri2);
assert mFile1.equals(mFile2);
assertThat(mFile1).isEqualTo(mFile2);
assertThat(mFile1.compareTo(mFile2)).isEqualTo(0);
assertThat(mFile1.hashCode()).isEqualTo(mFile2.hashCode());
assertThat(uri1).ignoringCase().isNotEqualTo(uri2);
assertThat(mFile1).isNotEqualTo(mFile3);
assertThat(mFile1.compareTo(mFile3)).isNotEqualTo(0);
assertThat(mFile1.hashCode()).isNotEqualTo(mFile3.hashCode());
}

private void checkS3MFilesAuxInfo(String uri) throws IOException {
@@ -852,6 +852,7 @@ private CoverageCoordAxis makeRuntimeAuxCoord(CoordinateTimeAbstract time) {
CoverageCoordAxisBuilder builder =
new CoverageCoordAxisBuilder(refName, master.getUnit(), Grib.GRIB_RUNTIME, DataType.DOUBLE, AxisType.RunTime,
atts, CoverageCoordAxis.DependenceType.dependent, time.getName(), null, length, 0, 0, 0, data, this);
builder.setMissingTolerance(0.0);
builder.setSpacingFromValues(false);

return new CoverageCoordAxis1D(builder);
