diff --git a/build.gradle b/build.gradle index ec2cf5362b..46d367fccd 100644 --- a/build.gradle +++ b/build.gradle @@ -9,6 +9,7 @@ buildscript { // The buildscript {} block is odd: even though we applied dependencies.gradle above, the repositories therein // do not get included here. Instead, we must explicitly define the repos again. Yay for duplication. repositories { + mavenCentral() gradlePluginPortal() exclusiveContent { forRepository { diff --git a/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java b/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java index b1c1e53005..642152cee4 100644 --- a/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java +++ b/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java @@ -354,8 +354,8 @@ public static synchronized void initNetcdfFileCache(int minElementsInMemory, int @Deprecated public static synchronized void initNetcdfFileCache(int minElementsInMemory, int maxElementsInMemory, int hardLimit, int period) { - netcdfFileCache = new ucar.nc2.util.cache.FileCache("NetcdfFileCache ", minElementsInMemory, maxElementsInMemory, - hardLimit, period); + netcdfFileCache = new ucar.nc2.util.cache.FileCache("NetcdfFileCache (deprecated)", minElementsInMemory, + maxElementsInMemory, hardLimit, period); } /** @deprecated use NetcdfDatasets.disableNetcdfFileCache */ diff --git a/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDatasets.java b/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDatasets.java index 3709727920..588d6abada 100644 --- a/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDatasets.java +++ b/cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDatasets.java @@ -63,7 +63,7 @@ public static synchronized void initNetcdfFileCache(int minElementsInMemory, int */ public static synchronized void initNetcdfFileCache(int minElementsInMemory, int maxElementsInMemory, int hardLimit, int period) { - netcdfFileCache = new FileCache("NetcdfFileCache ", minElementsInMemory, maxElementsInMemory, hardLimit, period); + 
netcdfFileCache = new FileCache("NetcdfFileCache", minElementsInMemory, maxElementsInMemory, hardLimit, period); } public static synchronized void disableNetcdfFileCache() { diff --git a/cdm/core/src/main/java/ucar/nc2/filter/Checksum32.java b/cdm/core/src/main/java/ucar/nc2/filter/Checksum32.java index b75bddce76..1a4f9d6d99 100644 --- a/cdm/core/src/main/java/ucar/nc2/filter/Checksum32.java +++ b/cdm/core/src/main/java/ucar/nc2/filter/Checksum32.java @@ -7,7 +7,6 @@ import com.google.common.primitives.Ints; -import java.nio.ByteOrder; import java.util.Map; import java.util.zip.Adler32; import java.util.zip.CRC32; @@ -35,17 +34,8 @@ public enum CType { private final CType type; // type of checksum - private final ByteOrder byteOrder; - - public Checksum32(CType type, ByteOrder bo) { - this.type = type; - this.byteOrder = bo; - } - - public Checksum32(CType type) { - // TODO: can we do this better? - this(type, ByteOrder.LITTLE_ENDIAN); + this.type = type; } @Override @@ -61,11 +51,16 @@ public int getId() { @Override public byte[] encode(byte[] dataIn) { // create a checksum - int checksum = getChecksum(dataIn); - // append checksum in front of data + int checksum = (int) getChecksum(dataIn); + // append checksum in front or behind data + // Adler and CRC are supported by Zarr, which follows the NumCodec spec with a checksum before the data + // Fletcher is supported by hdf5, which has the checksum after the data byte[] dataOut = new byte[dataIn.length + nbytes]; - System.arraycopy(dataIn, 0, dataOut, nbytes, dataIn.length); - System.arraycopy(Ints.toByteArray(checksum), 0, dataOut, 0, nbytes);; + int dataStart = this.type == CType.FLETCHER ? 0 : nbytes; + System.arraycopy(dataIn, 0, dataOut, dataStart, dataIn.length); + int checksumStart = this.type == CType.FLETCHER ?
dataOut.length - nbytes : 0; + // encode as little endian by default + System.arraycopy(Ints.toByteArray(Integer.reverseBytes(checksum)), 0, dataOut, checksumStart, nbytes);; return dataOut; } @@ -73,12 +68,16 @@ public byte[] decode(byte[] dataIn) { // strip the checksum byte[] dataOut = new byte[dataIn.length - nbytes]; - System.arraycopy(dataIn, nbytes, dataOut, 0, dataOut.length); + // Adler and CRC are supported by Zarr, which follows the NumCodec spec with a checksum before the data + // Fletcher is supported by hdf5, which has the checksum after the data + int dataStart = this.type == CType.FLETCHER ? 0 : nbytes; + System.arraycopy(dataIn, dataStart, dataOut, 0, dataOut.length); // verify checksum - int checksum = getChecksum(dataOut); + int checksum = (int) getChecksum(dataOut); byte[] bytes = new byte[nbytes]; - System.arraycopy(dataIn, 0, bytes, 0, nbytes); - int i = Ints.fromByteArray(bytes); + int checksumStart = this.type == CType.FLETCHER ?
dataIn.length - nbytes : 0; + System.arraycopy(dataIn, checksumStart, bytes, 0, nbytes); + int i = Integer.reverseBytes(Ints.fromByteArray(bytes)); // convert from little endian if (i != checksum) { throw new RuntimeException("Checksum invalid"); } @@ -86,7 +85,7 @@ public byte[] decode(byte[] dataIn) { return dataOut; } - private int getChecksum(byte[] data) { + private long getChecksum(byte[] data) { Checksum checksum; switch (type) { case ADLER: @@ -101,18 +100,13 @@ private int getChecksum(byte[] data) { break; } checksum.update(data, 0, data.length); - int val = (int) checksum.getValue(); - // reverse bytes for little endian - if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) { - val = Integer.reverseBytes(val); - } - return val; + return checksum.getValue(); } private class Fletcher32 extends Adler32 { - private int sum1 = 0; - private int sum2 = 0; + private long sum1 = 0; + private long sum2 = 0; @Override public void update(byte[] b, int off, int len) { @@ -122,10 +116,37 @@ public void update(byte[] b, int off, int len) { if (off < 0 || len < 0 || off > b.length - len) { throw new ArrayIndexOutOfBoundsException(); } - for (int i = off; i < len; i++) { - sum1 = (sum1 + (b[i] & 0xff)) % 65535; - sum2 = (sum2 + sum1) % 65535; + + int i = 0; + int end = len / 2; + while (end > 0) { + int blocklen = end > 360 ? 
360 : end; + end -= blocklen; + do { + sum1 += (b[i] & 0xff) << 8 | b[i + 1] & 0xff; + sum2 += sum1; + i += 2; + blocklen--; + } while (blocklen > 0); + sum1 = (sum1 & 0xffff) + (sum1 >>> 16); + sum2 = (sum2 & 0xffff) + (sum2 >>> 16); + } + + // handle odd # of bytes + if (len % 2 > 0) { + sum1 += (b[len - 1] & 0xff) << 8; + sum2 += sum1; + sum1 = (sum1 & 0xffff) + (sum1 >>> 16); + sum2 = (sum2 & 0xffff) + (sum2 >>> 16); } + + sum1 = (sum1 & 0xffff) + (sum1 >>> 16); + sum2 = (sum2 & 0xffff) + (sum2 >>> 16); + } + + @Override + public long getValue() { + return (sum2 << 16) | sum1; } } diff --git a/cdm/core/src/main/java/ucar/nc2/ft2/coverage/HorizCoordSys.java b/cdm/core/src/main/java/ucar/nc2/ft2/coverage/HorizCoordSys.java index 342502fa27..b4fa69e12c 100644 --- a/cdm/core/src/main/java/ucar/nc2/ft2/coverage/HorizCoordSys.java +++ b/cdm/core/src/main/java/ucar/nc2/ft2/coverage/HorizCoordSys.java @@ -657,20 +657,25 @@ private List calcLatLonBoundaryPointsFromProjection(int maxPointsIn final ProjectionPoint projPointInKm = ProjectionPoint.create(convertToKm(projPoint.getX(), xAxis.units, xAxis.name), convertToKm(projPoint.getY(), yAxis.units, yAxis.name)); - latLonPoints.add(transform.getProjection().projToLatLon(projPointInKm)); + + final LatLonPoint latLonPoint = transform.getProjection().projToLatLon(projPointInKm); + if (!Double.isNaN(latLonPoint.getLatitude()) && !Double.isNaN(latLonPoint.getLongitude())) { + latLonPoints.add(latLonPoint); + } } return latLonPoints; } // TODO is there a better place to handle units? 
+ // Some projections are actually just rotations (RotatedPole) + // so the "projection" coordinates have units "degrees" and don't need to be converted private static double convertToKm(double coordinate, String unit, String axisName) { if (unit.equals("km") || unit.equals("kilometers")) { return coordinate; } else if (unit.equals("m") || unit.equals("meters")) { return 0.001 * coordinate; } else { - logger.info("Unrecognized unit '" + unit + "' for axis '" + axisName + "'"); return coordinate; } } @@ -681,7 +686,6 @@ private static double convertFromKm(double coordinateInKm, String desiredUnit, S } else if (desiredUnit.equals("m") || desiredUnit.equals("meters")) { return 1000 * coordinateInKm; } else { - logger.info("Unrecognized unit '" + desiredUnit + "' for axis '" + axisName + "'"); return coordinateInKm; } } diff --git a/cdm/core/src/main/java/ucar/unidata/geoloc/Projection.java b/cdm/core/src/main/java/ucar/unidata/geoloc/Projection.java index 80f8652a1a..8996e9e7c8 100644 --- a/cdm/core/src/main/java/ucar/unidata/geoloc/Projection.java +++ b/cdm/core/src/main/java/ucar/unidata/geoloc/Projection.java @@ -10,6 +10,7 @@ /** * Projective geometry transformations from (lat,lon) to (x,y) on * a projective cartesian surface. + * Unless it is a rotation from (lat,lon) to (lat,lon). 
* * @author John Caron * LOOK will be immutable AutoValue in ver6 diff --git a/cdm/core/src/test/java/ucar/nc2/filter/TestFilters.java b/cdm/core/src/test/java/ucar/nc2/filter/TestFilters.java index 09a50b99f7..d7e6943178 100644 --- a/cdm/core/src/test/java/ucar/nc2/filter/TestFilters.java +++ b/cdm/core/src/test/java/ucar/nc2/filter/TestFilters.java @@ -5,12 +5,13 @@ package ucar.nc2.filter; +import com.google.common.primitives.Ints; import org.junit.BeforeClass; import org.junit.Test; import ucar.unidata.io.RandomAccessFile; import java.io.IOException; -import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -61,11 +62,11 @@ public void testShuffle() throws IOException { @Test public void testChecksum32() throws IOException { // test Adler32 - Filter filter = new Checksum32(Checksum32.CType.ADLER, ByteOrder.LITTLE_ENDIAN); + Filter filter = new Checksum32(Checksum32.CType.ADLER); testEncodeDecode(filter, "adler32"); // test CRC32 - filter = new Checksum32(Checksum32.CType.CRC, ByteOrder.LITTLE_ENDIAN); + filter = new Checksum32(Checksum32.CType.CRC); testEncodeDecode(filter, "crc32"); } @@ -108,4 +109,19 @@ private void testEncodeDecode(Filter filter, String filename) throws IOException assertThat(decoded).isEqualTo(decoded_data); } + @Test + public void testFletcher() { + // test case from Wikipedia Fletcher test vectors + String testString = "abcdefgh"; + int knownChecksum = -1785599007; + byte[] checksumBytes = Ints.toByteArray(knownChecksum); + Checksum32 filter = new Checksum32(Checksum32.CType.FLETCHER); + byte[] expected = testString.getBytes(StandardCharsets.UTF_8); + byte[] in = new byte[expected.length + 4]; + System.arraycopy(expected, 0, in, 0, expected.length); + System.arraycopy(checksumBytes, 0, in, expected.length, 4); + byte[] out = filter.decode(in); + assertThat(out).isEqualTo(expected); + } + } diff --git a/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSys.java
b/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSys.java new file mode 100644 index 0000000000..096e82ff56 --- /dev/null +++ b/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSys.java @@ -0,0 +1,60 @@ +package ucar.nc2.ft2.coverage; + +import static com.google.common.truth.Truth.assertThat; + +import java.lang.invoke.MethodHandles; +import java.util.List; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ucar.ma2.DataType; +import ucar.nc2.AttributeContainerMutable; +import ucar.nc2.constants.AxisType; +import ucar.nc2.constants.CF; +import ucar.nc2.ft2.coverage.CoverageCoordAxis.Spacing; +import ucar.unidata.geoloc.LatLonPointNoNormalize; +import ucar.unidata.geoloc.ProjectionPoint; + +public class TestHorizCoordSys { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Test + public void shouldRemoveNansWhenComputingLatLon() { + // Include x,y outside of geos transform range so that there will be nans in the lat,lon + final double[] xValues = new double[] {-0.101346, 0, 0.038626}; + final double[] yValues = new double[] {0.128226, 0, 0.044254}; + + final CoverageCoordAxis1D xAxis = createCoverageCoordAxis1D(AxisType.GeoX, xValues); + final CoverageCoordAxis1D yAxis = createCoverageCoordAxis1D(AxisType.GeoY, yValues); + + final AttributeContainerMutable attributes = new AttributeContainerMutable("attributes"); + attributes.addAttribute(CF.GRID_MAPPING_NAME, CF.GEOSTATIONARY); + attributes.addAttribute(CF.LONGITUDE_OF_PROJECTION_ORIGIN, -75.0); + attributes.addAttribute(CF.PERSPECTIVE_POINT_HEIGHT, 35786023.0); + attributes.addAttribute(CF.SEMI_MINOR_AXIS, 6356752.31414); + attributes.addAttribute(CF.SEMI_MAJOR_AXIS, 6378137.0); + attributes.addAttribute(CF.INVERSE_FLATTENING, 298.2572221); + attributes.addAttribute(CF.SWEEP_ANGLE_AXIS, "x"); + + final CoverageTransform transform = new CoverageTransform("transform", attributes, true); + final 
HorizCoordSys horizCoordSys = HorizCoordSys.factory(xAxis, yAxis, null, null, transform); + + final List projectionPoints = horizCoordSys.calcProjectionBoundaryPoints(); + assertThat(projectionPoints.size()).isEqualTo(12); + + final List boundaryPoints = horizCoordSys.calcConnectedLatLonBoundaryPoints(); + assertThat(boundaryPoints.size()).isEqualTo(5); // Less than the projection points because NaNs are removed + + for (LatLonPointNoNormalize latLonPoint : boundaryPoints) { + assertThat(latLonPoint.getLatitude()).isNotNaN(); + assertThat(latLonPoint.getLongitude()).isNotNaN(); + } + } + + private CoverageCoordAxis1D createCoverageCoordAxis1D(AxisType type, double[] values) { + final CoverageCoordAxisBuilder coordAxisBuilder = new CoverageCoordAxisBuilder("name", "unit", "description", + DataType.DOUBLE, type, null, CoverageCoordAxis.DependenceType.independent, null, Spacing.irregularPoint, + values.length, values[0], values[values.length - 1], values[1] - values[0], values, null); + return new CoverageCoordAxis1D(coordAxisBuilder); + } +} diff --git a/docs/src/site/pages/netcdfJava/GribFilesCdm.md b/docs/src/site/pages/netcdfJava/GribFilesCdm.md index 92ab21bbf2..cdd8947e1c 100644 --- a/docs/src/site/pages/netcdfJava/GribFilesCdm.md +++ b/docs/src/site/pages/netcdfJava/GribFilesCdm.md @@ -69,8 +69,6 @@ So you can move the data files and the _gbx_ files as needed. The CDM index files (_ncx4_) also store the names of the GRIB data files, and (usually) needs the GRIB files to exist there. So if you move the GRIB and GRIB index files, it\'s best to delete the _ncx4_ files and re-create them after the move. -The use of external tables in GRIB is quite problematic (read here for more details). Nonetheless, GRIB files are in wide use internationally and contain invaluable data. The CDM is a general-purpose GRIB reading library that makes GRIB data available through the CDM/NetCDF API, that is, as multidimensional data arrays and CF-compliant metadata and coordinates. 
- ## GRIB Tables The use of external tables in GRIB is quite problematic ([read](https://doi.org/10.5065/vkan-dp10){:target="blank"} for more details). diff --git a/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md b/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md index 05cdcfaeab..73e78dc322 100644 --- a/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md +++ b/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md @@ -10,7 +10,7 @@ toc: false The netCDF-Java source code is hosted on GitHub, and — as of v4.6.1 — we use Gradle to build it. Ant and Maven builds are no longer supported. -To build, you need Git and JDK 8 installed (building with JDK > 8 is not yet supported, but is being addressed). +To build, you need Git and JDK 8, JDK 11, or JDK 14 installed. First, clone the netCDF-Java repository from Github: @@ -27,11 +27,17 @@ Change into the netcdf-java directory: cd netcdf-java ~~~ -By default, the current branch head is set to master, which is our main development branch. -If you’d like to build a released version instead — v5.0.0, for example, you’ll need to checkout that version: +By default, the current branch head is set to `maint-5.x`, which is our main development branch. +If you’d like to build a released version instead, you can see all the release tags using: +~~~bash +git tag +~~~ + +We recommend that you choose the latest release. +To choose release version {{site.docset_version}}.0, for example, you’ll need to checkout that version's tag: ~~~bash -git checkout v5.0.0 +git checkout v{{site.docset_version}}.0 ~~~ Next, use the Gradle wrapper to execute the assemble task: @@ -50,11 +56,11 @@ NetCDF-Java is comprised of several modules, many of which you can use within yo At Unidata, we publish the artifacts that those modules generate to our Nexus repository. 
However, it may happen that you need artifacts for the in-development version of netCDF-Java in your local branch, which we usually don’t upload to Nexus. -We do publish nightly SNAPSHOTS, but those may not have the develoment changes you are currently working on. +We do publish nightly SNAPSHOTS, but those may not have the development changes you are currently working on. Never fear: you can build them yourself and publish them to your local Maven repository! ~~~ -git checkout master +git checkout maint-5.x ./gradlew publishToMavenLocal ~~~ diff --git a/gradle/any/gretty.gradle b/gradle/any/gretty.gradle index b1b2f67d1d..f14c381668 100644 --- a/gradle/any/gretty.gradle +++ b/gradle/any/gretty.gradle @@ -5,6 +5,7 @@ buildscript { apply from: "$rootDir/gradle/any/shared-mvn-coords.gradle" repositories { + mavenCentral() gradlePluginPortal() } dependencies { @@ -16,14 +17,14 @@ apply from: "$rootDir/gradle/any/properties.gradle" apply plugin: 'org.gretty' apply plugin: 'jacoco' -// when applying the gretty buildScript to a project, make sure that project has access to the gradlePluginPortal -// which is where the gretty tomcat runners live +// when applying the gretty buildScript to a project, make sure that project has access to the gretty tomcat runners repositories { exclusiveContent { forRepository { + mavenCentral() gradlePluginPortal() } - // only look for unidata gretty related artifacts from the gradlePluginPortal + // only look for unidata gretty related artifacts from the above repos filter { includeGroup 'org.gretty' } diff --git a/gradle/any/shared-mvn-coords.gradle b/gradle/any/shared-mvn-coords.gradle index 77b2e20f14..dbeef006b1 100644 --- a/gradle/any/shared-mvn-coords.gradle +++ b/gradle/any/shared-mvn-coords.gradle @@ -7,7 +7,7 @@ ext { // plugin version management buildPlugins = [:] - buildPlugins.gretty = 'org.gretty:gretty:3.0.3' + buildPlugins.gretty = 'org.gretty:gretty:3.0.9' buildPlugins.shadow =
'com.github.jengelman.gradle.plugins:shadow:5.2.0' buildPlugins.sonarqube = 'org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:3.0' buildPlugins.spotless = 'com.diffplug.spotless:spotless-plugin-gradle:4.5.1' diff --git a/gradle/root/publishing.gradle b/gradle/root/publishing.gradle index 45001cdd3e..7cfee766a9 100644 --- a/gradle/root/publishing.gradle +++ b/gradle/root/publishing.gradle @@ -6,6 +6,7 @@ buildscript { // The buildscript {} block is odd: even though we applied dependencies.gradle above, the repositories therein // do not get included here. Instead, we must explicitly define the repos again. Yay for duplication. repositories { + mavenCentral() gradlePluginPortal() exclusiveContent { forRepository { diff --git a/legacy/src/main/java/thredds/cataloggen/config/LocalDatasetSource.java b/legacy/src/main/java/thredds/cataloggen/config/LocalDatasetSource.java index fb2835750a..d6c7b77c04 100644 --- a/legacy/src/main/java/thredds/cataloggen/config/LocalDatasetSource.java +++ b/legacy/src/main/java/thredds/cataloggen/config/LocalDatasetSource.java @@ -108,7 +108,7 @@ private void checkAccessPoint() throws IOException { // Check that accessPoint file starts with accessPointHeader. if (!apFile.getPath().startsWith(aphFile.getPath()) - && !apFile.getCanonicalPath().startsWith(aphFile.getCanonicalPath())) { + && !apFile.getCanonicalFile().toPath().startsWith(aphFile.getCanonicalFile().toPath())) { String tmpMsg = "The accessPoint <" + apFile.getPath() + " or " + apFile.getCanonicalPath() + "> must start with the accessPointHeader <" + aphFile.getPath() + " or " + aphFile.getCanonicalPath() + ">.";