Skip to content

Commit

Permalink
Merge branch 'Unidata:maint-5.x' into handle-zero-dimension-2
Browse files Browse the repository at this point in the history
  • Loading branch information
mnlerman authored Oct 4, 2022
2 parents 5c4bce4 + 6fa510a commit 44a82ad
Show file tree
Hide file tree
Showing 14 changed files with 162 additions and 53 deletions.
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ buildscript {
// The buildscript {} block is odd: even though we applied dependencies.gradle above, the repositories therein
// do not get included here. Instead, we must explicitly define the repos again. Yay for duplication.
repositories {
mavenCentral()
gradlePluginPortal()
exclusiveContent {
forRepository {
Expand Down
4 changes: 2 additions & 2 deletions cdm/core/src/main/java/ucar/nc2/dataset/NetcdfDataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -354,8 +354,8 @@ public static synchronized void initNetcdfFileCache(int minElementsInMemory, int
@Deprecated
public static synchronized void initNetcdfFileCache(int minElementsInMemory, int maxElementsInMemory, int hardLimit,
int period) {
netcdfFileCache = new ucar.nc2.util.cache.FileCache("NetcdfFileCache ", minElementsInMemory, maxElementsInMemory,
hardLimit, period);
netcdfFileCache = new ucar.nc2.util.cache.FileCache("NetcdfFileCache (deprecated)", minElementsInMemory,
maxElementsInMemory, hardLimit, period);
}

/** @deprecated use NetcdfDatasets.disableNetcdfFileCache */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public static synchronized void initNetcdfFileCache(int minElementsInMemory, int
*/
public static synchronized void initNetcdfFileCache(int minElementsInMemory, int maxElementsInMemory, int hardLimit,
int period) {
netcdfFileCache = new FileCache("NetcdfFileCache ", minElementsInMemory, maxElementsInMemory, hardLimit, period);
netcdfFileCache = new FileCache("NetcdfFileCache", minElementsInMemory, maxElementsInMemory, hardLimit, period);
}

public static synchronized void disableNetcdfFileCache() {
Expand Down
83 changes: 52 additions & 31 deletions cdm/core/src/main/java/ucar/nc2/filter/Checksum32.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

import com.google.common.primitives.Ints;

import java.nio.ByteOrder;
import java.util.Map;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
Expand Down Expand Up @@ -35,17 +34,8 @@ public enum CType {

private final CType type; // type of checksum

private final ByteOrder byteOrder;

public Checksum32(CType type, ByteOrder bo) {
this.type = type;
this.byteOrder = bo;
}


public Checksum32(CType type) {
// TODO: can we do this better?
this(type, ByteOrder.LITTLE_ENDIAN);
this.type = type;
}

@Override
Expand All @@ -61,32 +51,41 @@ public int getId() {
@Override
public byte[] encode(byte[] dataIn) {
// create a checksum
int checksum = getChecksum(dataIn);
// append checksum in front of data
int checksum = (int) getChecksum(dataIn);
// append checksum in front or behind data
// Adler and CRC are supported by Zarr, which follows the NumCodec spec with a checksum before the data
// Fletcher is supported by hdf5, which has the checksum after the data
byte[] dataOut = new byte[dataIn.length + nbytes];
System.arraycopy(dataIn, 0, dataOut, nbytes, dataIn.length);
System.arraycopy(Ints.toByteArray(checksum), 0, dataOut, 0, nbytes);;
int dataStart = this.type == CType.FLETCHER ? 0 : nbytes;
System.arraycopy(dataIn, 0, dataOut, dataStart, dataIn.length);
int checksumStart = this.type == CType.FLETCHER ? dataOut.length - nbytes : 0;
// encode as little endian by default
System.arraycopy(Ints.toByteArray(Integer.reverseBytes(checksum)), 0, dataOut, checksumStart, nbytes);;
return dataOut;
}

@Override
public byte[] decode(byte[] dataIn) {
// strip the checksum
byte[] dataOut = new byte[dataIn.length - nbytes];
System.arraycopy(dataIn, nbytes, dataOut, 0, dataOut.length);
// Adler and CRC are supported by Zarr, which follows the NumCodec spec with a checksum before the data
// Fletcher is supported by hdf5, which has the checksum after the data
int dataStart = this.type == CType.FLETCHER ? 0 : nbytes;
System.arraycopy(dataIn, dataStart, dataOut, 0, dataOut.length);
// verify checksum
int checksum = getChecksum(dataOut);
int checksum = (int) getChecksum(dataOut);
byte[] bytes = new byte[nbytes];
System.arraycopy(dataIn, 0, bytes, 0, nbytes);
int i = Ints.fromByteArray(bytes);
int checksumStart = this.type == CType.FLETCHER ? dataIn.length - nbytes : 0;
System.arraycopy(dataIn, checksumStart, bytes, 0, nbytes);
int i = Integer.reverseBytes(Ints.fromByteArray(bytes)); // convert from little endian
if (i != checksum) {
throw new RuntimeException("Checksum invalid");
}
// return data
return dataOut;
}

private int getChecksum(byte[] data) {
private long getChecksum(byte[] data) {
Checksum checksum;
switch (type) {
case ADLER:
Expand All @@ -101,18 +100,13 @@ private int getChecksum(byte[] data) {
break;
}
checksum.update(data, 0, data.length);
int val = (int) checksum.getValue();
// reverse bytes for little endian
if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) {
val = Integer.reverseBytes(val);
}
return val;
return checksum.getValue();
}

private class Fletcher32 extends Adler32 {

private int sum1 = 0;
private int sum2 = 0;
private long sum1 = 0;
private long sum2 = 0;

@Override
public void update(byte[] b, int off, int len) {
Expand All @@ -122,10 +116,37 @@ public void update(byte[] b, int off, int len) {
if (off < 0 || len < 0 || off > b.length - len) {
throw new ArrayIndexOutOfBoundsException();
}
for (int i = off; i < len; i++) {
sum1 = (sum1 + (b[i] & 0xff)) % 65535;
sum2 = (sum2 + sum1) % 65535;

int i = 0;
int end = len / 2;
while (end > 0) {
int blocklen = end > 360 ? 360 : end;
end -= blocklen;
do {
sum1 += (b[i] & 0xff) << 8 | b[i + 1] & 0xff;
sum2 += sum1;
i += 2;
blocklen--;
} while (blocklen > 0);
sum1 = (sum1 & 0xffff) + (sum1 >>> 16);
sum2 = (sum2 & 0xffff) + (sum2 >>> 16);
}

// handle odd # of bytes
if (len % 2 > 0) {
sum1 += (b[len - 1] & 0xff) << 8;
sum2 += sum1;
sum1 = (sum1 & 0xffff) + (sum1 >>> 16);
sum2 = (sum2 & 0xffff) + (sum2 >>> 16);
}

sum1 = (sum1 & 0xffff) + (sum1 >>> 16);
sum2 = (sum2 & 0xffff) + (sum2 >>> 16);
}

/**
 * Combine the two 16-bit running sums into the final Fletcher-32 value:
 * the second sum occupies the high 16 bits, the first sum the low 16 bits.
 */
@Override
public long getValue() {
return sum1 | (sum2 << 16);
}
}

Expand Down
10 changes: 7 additions & 3 deletions cdm/core/src/main/java/ucar/nc2/ft2/coverage/HorizCoordSys.java
Original file line number Diff line number Diff line change
Expand Up @@ -657,20 +657,25 @@ private List<LatLonPoint> calcLatLonBoundaryPointsFromProjection(int maxPointsIn
final ProjectionPoint projPointInKm =
ProjectionPoint.create(convertToKm(projPoint.getX(), xAxis.units, xAxis.name),
convertToKm(projPoint.getY(), yAxis.units, yAxis.name));
latLonPoints.add(transform.getProjection().projToLatLon(projPointInKm));

final LatLonPoint latLonPoint = transform.getProjection().projToLatLon(projPointInKm);
if (!Double.isNaN(latLonPoint.getLatitude()) && !Double.isNaN(latLonPoint.getLongitude())) {
latLonPoints.add(latLonPoint);
}
}

return latLonPoints;
}

// TODO is there a better place to handle units?
// Some projections are actually just rotations (RotatedPole)
// so the "projection" coordinates have units "degrees" and don't need to be converted
/**
 * Converts a projection coordinate value to kilometers based on the axis unit string.
 *
 * @param coordinate projection coordinate value in the axis's native unit
 * @param unit unit string of the axis (e.g. "km", "m"); compared literally, not parsed
 * @param axisName axis name, used only for the log message on an unrecognized unit
 * @return the coordinate in kilometers, or unchanged if the unit is not a recognized length unit
 */
private static double convertToKm(double coordinate, String unit, String axisName) {
if (unit.equals("km") || unit.equals("kilometers")) {
return coordinate;
} else if (unit.equals("m") || unit.equals("meters")) {
// meters -> kilometers
return 0.001 * coordinate;
} else {
logger.info("Unrecognized unit '" + unit + "' for axis '" + axisName + "'");
// Unknown unit (e.g. "degrees" for rotation-style projections such as RotatedPole):
// pass the value through unchanged rather than failing
return coordinate;
}
}
Expand All @@ -681,7 +686,6 @@ private static double convertFromKm(double coordinateInKm, String desiredUnit, S
} else if (desiredUnit.equals("m") || desiredUnit.equals("meters")) {
return 1000 * coordinateInKm;
} else {
logger.info("Unrecognized unit '" + desiredUnit + "' for axis '" + axisName + "'");
return coordinateInKm;
}
}
Expand Down
1 change: 1 addition & 0 deletions cdm/core/src/main/java/ucar/unidata/geoloc/Projection.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
/**
* Projective geometry transformations from (lat,lon) to (x,y) on
* a projective cartesian surface.
* Unless it is a rotation from (lat,lon) to (lat,lon).
*
* @author John Caron
* LOOK will be immutable AutoValue in ver6
Expand Down
22 changes: 19 additions & 3 deletions cdm/core/src/test/java/ucar/nc2/filter/TestFilters.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@

package ucar.nc2.filter;

import com.google.common.primitives.Ints;
import org.junit.BeforeClass;
import org.junit.Test;
import ucar.unidata.io.RandomAccessFile;

import java.io.IOException;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

Expand Down Expand Up @@ -61,11 +62,11 @@ public void testShuffle() throws IOException {
@Test
public void testChecksum32() throws IOException {
// test Adler32
Filter filter = new Checksum32(Checksum32.CType.ADLER, ByteOrder.LITTLE_ENDIAN);
Filter filter = new Checksum32(Checksum32.CType.ADLER);
testEncodeDecode(filter, "adler32");

// test CRC32
filter = new Checksum32(Checksum32.CType.CRC, ByteOrder.LITTLE_ENDIAN);
filter = new Checksum32(Checksum32.CType.CRC);
testEncodeDecode(filter, "crc32");
}

Expand Down Expand Up @@ -108,4 +109,19 @@ private void testEncodeDecode(Filter filter, String filename) throws IOException
assertThat(decoded).isEqualTo(decoded_data);
}

@Test
public void testFletcher() {
// Fletcher-32 test vector for "abcdefgh" from the Wikipedia Fletcher checksum article,
// pre-encoded as the little-endian int that decode() expects
final String payload = "abcdefgh";
final int knownChecksum = -1785599007;

// Fletcher (the hdf5 convention) places the 4-byte checksum AFTER the data
final byte[] payloadBytes = payload.getBytes(StandardCharsets.UTF_8);
final byte[] encoded = new byte[payloadBytes.length + 4];
System.arraycopy(payloadBytes, 0, encoded, 0, payloadBytes.length);
System.arraycopy(Ints.toByteArray(knownChecksum), 0, encoded, payloadBytes.length, 4);

// decode() verifies the trailing checksum and strips it, returning the original payload
final Checksum32 filter = new Checksum32(Checksum32.CType.FLETCHER);
assertThat(filter.decode(encoded)).isEqualTo(payloadBytes);
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
package ucar.nc2.ft2.coverage;

import static com.google.common.truth.Truth.assertThat;

import java.lang.invoke.MethodHandles;
import java.util.List;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ucar.ma2.DataType;
import ucar.nc2.AttributeContainerMutable;
import ucar.nc2.constants.AxisType;
import ucar.nc2.constants.CF;
import ucar.nc2.ft2.coverage.CoverageCoordAxis.Spacing;
import ucar.unidata.geoloc.LatLonPointNoNormalize;
import ucar.unidata.geoloc.ProjectionPoint;

/**
 * Tests that {@link HorizCoordSys} drops lat/lon boundary points that come out as NaN
 * when the projection cannot map a projection point to a valid lat/lon.
 */
public class TestHorizCoordSys {
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

@Test
public void shouldRemoveNansWhenComputingLatLon() {
// Include x,y outside of geos transform range so that there will be nans in the lat,lon
final double[] xValues = new double[] {-0.101346, 0, 0.038626};
final double[] yValues = new double[] {0.128226, 0, 0.044254};

final CoverageCoordAxis1D xAxis = createCoverageCoordAxis1D(AxisType.GeoX, xValues);
final CoverageCoordAxis1D yAxis = createCoverageCoordAxis1D(AxisType.GeoY, yValues);

// CF geostationary projection attributes (GOES-East style: lon origin -75, sweep axis "x")
final AttributeContainerMutable attributes = new AttributeContainerMutable("attributes");
attributes.addAttribute(CF.GRID_MAPPING_NAME, CF.GEOSTATIONARY);
attributes.addAttribute(CF.LONGITUDE_OF_PROJECTION_ORIGIN, -75.0);
attributes.addAttribute(CF.PERSPECTIVE_POINT_HEIGHT, 35786023.0);
attributes.addAttribute(CF.SEMI_MINOR_AXIS, 6356752.31414);
attributes.addAttribute(CF.SEMI_MAJOR_AXIS, 6378137.0);
attributes.addAttribute(CF.INVERSE_FLATTENING, 298.2572221);
attributes.addAttribute(CF.SWEEP_ANGLE_AXIS, "x");

final CoverageTransform transform = new CoverageTransform("transform", attributes, true);
final HorizCoordSys horizCoordSys = HorizCoordSys.factory(xAxis, yAxis, null, null, transform);

// All projection boundary points survive; NaN filtering happens only in the lat/lon step
final List<ProjectionPoint> projectionPoints = horizCoordSys.calcProjectionBoundaryPoints();
assertThat(projectionPoints.size()).isEqualTo(12);

final List<LatLonPointNoNormalize> boundaryPoints = horizCoordSys.calcConnectedLatLonBoundaryPoints();
assertThat(boundaryPoints.size()).isEqualTo(5); // Less than the projection points because NaNs are removed

for (LatLonPointNoNormalize latLonPoint : boundaryPoints) {
assertThat(latLonPoint.getLatitude()).isNotNaN();
assertThat(latLonPoint.getLongitude()).isNotNaN();
}
}

/**
 * Builds a 1-D coverage axis with irregular-point spacing over the given values.
 * Note: assumes values has at least 2 elements (uses values[1] - values[0] as the resolution).
 */
private CoverageCoordAxis1D createCoverageCoordAxis1D(AxisType type, double[] values) {
final CoverageCoordAxisBuilder coordAxisBuilder = new CoverageCoordAxisBuilder("name", "unit", "description",
DataType.DOUBLE, type, null, CoverageCoordAxis.DependenceType.independent, null, Spacing.irregularPoint,
values.length, values[0], values[values.length - 1], values[1] - values[0], values, null);
return new CoverageCoordAxis1D(coordAxisBuilder);
}
}
2 changes: 0 additions & 2 deletions docs/src/site/pages/netcdfJava/GribFilesCdm.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,8 +69,6 @@ So you can move the data files and the _gbx_ files as needed.
The CDM index files (_ncx4_) also store the names of the GRIB data files, and (usually) needs the GRIB files to exist there.
So if you move the GRIB and GRIB index files, it\'s best to delete the _ncx4_ files and re-create them after the move.

The use of external tables in GRIB is quite problematic (read here for more details). Nonetheless, GRIB files are in wide use internationally and contain invaluable data. The CDM is a general-purpose GRIB reading library that makes GRIB data available through the CDM/NetCDF API, that is, as multidimensional data arrays and CF-compliant metadata and coordinates.

## GRIB Tables

The use of external tables in GRIB is quite problematic ([read](https://doi.org/10.5065/vkan-dp10){:target="blank"} for more details).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ toc: false

The netCDF-Java source code is hosted on GitHub, and — as of v4.6.1 — we use Gradle to build it.
Ant and Maven builds are no longer supported.
To build, you need Git and JDK 8 installed (building with JDK > 8 is not yet supported, but is being addressed).
To build, you need Git and JDK 8, JDK 11, or JDK 14 installed.

First, clone the netCDF-Java repository from Github:

Expand All @@ -27,11 +27,17 @@ Change into the netcdf-java directory:
cd netcdf-java
~~~

By default, the current branch head is set to master, which is our main development branch.
If you’d like to build a released version instead — v5.0.0, for example, you’ll need to checkout that version:
By default, the current branch head is set to `maint-5.x`, which is our main development branch.
If you’d like to build a released version instead, you can see all the release tags using:
~~~bash
git tag
~~~

We recommend that you choose the latest release.
To choose release version {{site.docset_version}}.0, for example, you’ll need to checkout that version's tag:

~~~bash
git checkout v5.0.0
git checkout v{{site.docset_version}}.0
~~~

Next, use the Gradle wrapper to execute the assemble task:
Expand All @@ -50,11 +56,11 @@ NetCDF-Java is comprised of several modules, many of which you can use within yo
At Unidata, we publish the artifacts that those modules generate to our Nexus repository.

However, it may happen that you need artifacts for the in-development version of netCDF-Java in your local branch, which we usually don’t upload to Nexus.
We do publish nightly SNAPSHOTS, but those may not have the develoment changes you are currently working on.
We do publish nightly SNAPSHOTS, but those may not have the development changes you are currently working on.
Never fear: you can build them yourself and publish them to your local Maven repository!

~~~
git checkout master
git checkout maint-5.x
./gradlew publishToMavenLocal
~~~

Expand Down
Loading

0 comments on commit 44a82ad

Please sign in to comment.