diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 2efd7d6e62..3c5ed37ef8 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -98,8 +98,8 @@ Once you are satisfied with your work, there is one last step to complete before ### Follow these Style Guidelines -#### Java, Groovy, and Gradle -We are using the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html) for Java, Groovy, and Gradle, with these exceptions and augmentations: +#### Java and Gradle +We are using the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html) for Java and Gradle, with these exceptions and augmentations: 1. The recommended line width is 120, not 100. Modern screens are much wider than tall, so having wider lines allows more code to fit on a screen. diff --git a/cdm/core/build.gradle b/cdm/core/build.gradle index 369a4cc19e..f02decb9ff 100644 --- a/cdm/core/build.gradle +++ b/cdm/core/build.gradle @@ -6,8 +6,6 @@ apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" apply from: "$rootDir/gradle/any/protobuf.gradle" -apply plugin: 'groovy' // For Spock tests. - dependencies { api enforcedPlatform(project(':netcdf-java-platform')) implementation 'commons-math:commons-math' @@ -32,9 +30,7 @@ dependencies { testImplementation 'com.google.truth:truth' testImplementation 'commons-io:commons-io' testImplementation 'junit:junit' - testImplementation 'org.codehaus.groovy:groovy-all' // for Spock. testImplementation 'org.mockito:mockito-core' - testImplementation 'org.spockframework:spock-core' testRuntimeOnly 'ch.qos.logback:logback-classic' } diff --git a/cdm/core/src/test/groovy/ucar/nc2/ft/point/FlattenedDatasetPointCollectionSpec.groovy b/cdm/core/src/test/groovy/ucar/nc2/ft/point/FlattenedDatasetPointCollectionSpec.groovy deleted file mode 100644 index 0b8591cf3d..0000000000 --- a/cdm/core/src/test/groovy/ucar/nc2/ft/point/FlattenedDatasetPointCollectionSpec.groovy +++ /dev/null @@ -1,196 +0,0 @@ -package ucar.nc2.ft.point - -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification -import ucar.nc2.constants.FeatureType -import ucar.nc2.ft.* -import ucar.nc2.time.CalendarDateRange -import ucar.nc2.time.CalendarDateUnit -import ucar.unidata.geoloc.EarthLocation -import ucar.unidata.geoloc.LatLonRect -import ucar.unidata.geoloc.LatLonPoint - -/** - * @author cwardgar - * @since 2015/06/26 - */ -class FlattenedDatasetPointCollectionSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(FlattenedDatasetPointCollectionSpec) - - // FDP used in all feature methods. Its getPointFeatureCollectionList() method will be stubbed to return - // different collections per test. 
- def fdPoint = Mock(FeatureDatasetPoint) - - PointFeature pf1, pf2, pf3, pf4, pf5, pf6, pf7, pf8, pf9 - - def setup() { // run before every feature method - setup: "create point features" - CalendarDateUnit dateUnit = CalendarDateUnit.of(null, "days since 1970-01-01 00:00:00") - DsgFeatureCollection dummyDsg = new SimplePointFeatureCollection("dummy", dateUnit, "m") - - pf1 = makePointFeat(dummyDsg, -75, -70, 630, 23, dateUnit) - pf2 = makePointFeat(dummyDsg, -60, -40, 94, 51, dateUnit) - pf3 = makePointFeat(dummyDsg, -45, -10, 1760, 88, dateUnit) - pf4 = makePointFeat(dummyDsg, -85, 20, 18940, 120, dateUnit) - pf5 = makePointFeat(dummyDsg, 0, 50, 26600, 150, dateUnit) - pf6 = makePointFeat(dummyDsg, 85, 80, 52800, 180, dateUnit) - pf7 = makePointFeat(dummyDsg, 15, 110, 1894, 200, dateUnit) - pf8 = makePointFeat(dummyDsg, 30, 140, 266, 300, dateUnit) - pf9 = makePointFeat(dummyDsg, 45, 170, 5280, 400, dateUnit) - } - - private static PointFeature makePointFeat( - DsgFeatureCollection dsg, double lat, double lon, double alt, double time, CalendarDateUnit dateUnit) { - def earthLoc = EarthLocation.create(lat, lon, alt) - - // Pass null StructureData; we only care about the metadata for these tests. - return new SimplePointFeature(dsg, earthLoc, time, time, dateUnit, null) - } - - def "handles empty FeatureDatasetPoint"() { - setup: "fdPoint returns an empty list of PointFeatureCollections" - fdPoint.getPointFeatureCollectionList() >> [] - - when: "we construct a FlattenedDatasetPointCollection using our exmpty FeatureDatasetPoint" - def flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint); - - then: "the default unitsString and altUnits are used" - flattenedDatasetCol.timeUnit.udUnit == CalendarDateUnit.unixDateUnit.udUnit - flattenedDatasetCol.altUnits == null - - when: "get empty collection's iterator" - def flattenedDatasetIter = flattenedDatasetCol.getPointFeatureIterator() - - then: "iterator is empty" - !flattenedDatasetIter.hasNext() - flattenedDatasetIter.next() == null - } - - def "metadata of aggregate collection is taken from the first collection"() { - setup: "create CalendarDateUnits" - CalendarDateUnit calDateUnitAlpha = CalendarDateUnit.of(null, "d since 1970-01-01 00:00:00") - CalendarDateUnit calDateUnitBeta = CalendarDateUnit.of(null, "day since 1970-01-01 00:00:00") - CalendarDateUnit dateUnitGamma = CalendarDateUnit.of(null, "days since 1970-01-01 00:00:00") - - and: "create PointFeatureCollections" - PointFeatureCollection pointFeatColAlpha = new SimplePointFeatureCollection("Alpha", calDateUnitAlpha, "yard"); - PointFeatureCollection pointFeatColBeta = new SimplePointFeatureCollection("Beta", calDateUnitBeta, "mm") - PointFeatureCollection pointFeatColGamma = new SimplePointFeatureCollection("Gamma", dateUnitGamma, "feet") - - and: "fdPoint returns our 3 feature collections" - fdPoint.getPointFeatureCollectionList() >> [pointFeatColAlpha, pointFeatColBeta, pointFeatColGamma] - - when: "we flatten our dataset containing 3 collections into one collection" - def flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint); - - then: "flattenedDatasetCol metadata objects are same as pointFeatColAlpha's" - flattenedDatasetCol.timeUnit.is pointFeatColAlpha.timeUnit - flattenedDatasetCol.altUnits.is pointFeatColAlpha.altUnits - } - - def "all kinds of empty"() { - setup: "create an empty instance of each of the DsgFeatureCollection types" - PointFeatureCollection emptyC = new SimplePointFeatureCollection("emptyC", null, "m") - PointFeatureCC emptyCC = new 
SimplePointFeatureCC("emptyCC", null, "y", FeatureType.POINT) - PointFeatureCCC emptyCCC = new SimplePointFeatureCCC("emptyCCC", null, "in", FeatureType.POINT) - - and: "create a non-empty PointFeatureCC that contains an empty PointFeatureCollection" - PointFeatureCC nonEmptyCC = new SimplePointFeatureCC("nonEmptyCC", null, "y", FeatureType.POINT) - nonEmptyCC.add(emptyC) - - and: "create a non-empty PointFeatureCCC that contains both an empty and non-empty PointFeatureCC" - PointFeatureCCC nonEmptyCCC = new SimplePointFeatureCCC("nonEmptyCCC", null, "in", FeatureType.POINT) - nonEmptyCCC.add(emptyCC) - nonEmptyCCC.add(nonEmptyCC) - - and: "create a mock FeatureDatasetPoint that returns each of our DsgFeatureCollection instances" - fdPoint.getPointFeatureCollectionList() >> [ - emptyC, emptyCC, emptyCCC, nonEmptyCC, nonEmptyCCC - ] - - and: "create flattened collection from our mocked dataset" - FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint) - - expect: "collection contains no PointFeatures" - flattenedDatasetCol.asList().isEmpty() - } - - def "multiple DsgFeatureCollection types in one FeatureDataset"() { - setup: "create PointFeatureCollections" - PointFeatureCollection pfc1 = new SimplePointFeatureCollection("pfc1", null, "m") - pfc1.add(pf1); - - PointFeatureCollection pfc2 = new SimplePointFeatureCollection("pfc2", null, "m") - pfc2.add(pf2) - pfc2.add(pf3) - - PointFeatureCollection pfc3 = new SimplePointFeatureCollection("pfc3", null, "m") - - PointFeatureCollection pfc4 = new SimplePointFeatureCollection("pfc4", null, "m") - pfc4.add(pf4) - - PointFeatureCollection pfc5 = new SimplePointFeatureCollection("pfc5", null, "m") - pfc5.add(pf5) - pfc5.add(pf6) - pfc5.add(pf7) - - PointFeatureCollection pfc6 = new SimplePointFeatureCollection("pfc6", null, "m") - pfc6.add(pf8) - - PointFeatureCollection pfc7 = new SimplePointFeatureCollection("pfc7", null, "m") - pfc7.add(pf9) - - and: "create PointFeatureCCs" - PointFeatureCC pfcc1 = new SimplePointFeatureCC("pfcc1", null, "m", FeatureType.POINT) - pfcc1.add(pfc1) - pfcc1.add(pfc2) - - PointFeatureCC pfcc2 = new SimplePointFeatureCC("pfcc2", null, "m", FeatureType.POINT) - pfcc2.add(pfc3) - pfcc2.add(pfc4) - - PointFeatureCC pfcc3 = new SimplePointFeatureCC("pfcc3", null, "m", FeatureType.POINT) - pfcc3.add(pfc6) - pfcc3.add(pfc7) - - and: "create PointFeatureCCC" - CalendarDateUnit dateUnit = CalendarDateUnit.of(null, "d since 1970-01-01 00:00:00") - PointFeatureCCC pfccc = new SimplePointFeatureCCC("pfccc", dateUnit, "m", FeatureType.POINT) - pfccc.add(pfcc1) - pfccc.add(pfcc2) - - and: "mock FeatureDatasetPoint to return 1 of each DsgFeatureCollection instance, then flatten it" - fdPoint.getPointFeatureCollectionList() >> [pfccc, pfc5, pfcc3] - FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint) - - - expect: "before iterating over the collection, bounds are null" - flattenedDatasetCol.boundingBox == null - flattenedDatasetCol.calendarDateRange == null - - when: "get the iterator and enable bounds calculation" - PointIteratorAbstract flattenedPointIter = flattenedDatasetCol.getPointFeatureIterator() as PointIteratorAbstract - flattenedPointIter.calculateBounds = flattenedDatasetCol.info - - and: "iterate over the collection" - def actualPointFeats = [] - flattenedPointIter.withCloseable { - for (PointFeature pointFeat : flattenedPointIter) { - actualPointFeats << pointFeat - } - } - - then: "the 9 PointFeatures are returned in order" - 
actualPointFeats == [pf1, pf2, pf3, pf4, pf5, pf6, pf7, pf8, pf9] - - and: "the bounds include all 9 PointFeatures" - flattenedDatasetCol.size() == 9 - flattenedDatasetCol.boundingBox == new LatLonRect(LatLonPoint.create(-85, -70), LatLonPoint.create(85, 170)) - - and: - def calDateUnit = flattenedDatasetCol.timeUnit - flattenedDatasetCol.calendarDateRange == CalendarDateRange.of( - calDateUnit.makeCalendarDate(23), calDateUnit.makeCalendarDate(400)) - } -} diff --git a/cdm/core/src/test/groovy/ucar/nc2/ft/point/PointIteratorFilteredSpec.groovy b/cdm/core/src/test/groovy/ucar/nc2/ft/point/PointIteratorFilteredSpec.groovy deleted file mode 100644 index d4dac4e735..0000000000 --- a/cdm/core/src/test/groovy/ucar/nc2/ft/point/PointIteratorFilteredSpec.groovy +++ /dev/null @@ -1,80 +0,0 @@ -package ucar.nc2.ft.point - -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification -import ucar.ma2.Array -import ucar.ma2.ArrayObject -import ucar.ma2.StructureData -import ucar.nc2.ft.FeatureDatasetPoint -import ucar.nc2.ft.PointFeature -import ucar.nc2.ft.PointFeatureCollection -import ucar.nc2.ft.PointFeatureIterator -import ucar.nc2.time.CalendarDate -import ucar.nc2.time.CalendarDateRange -import ucar.nc2.time.CalendarDateUnit -import ucar.unidata.geoloc.LatLonPoint -import ucar.unidata.geoloc.LatLonRect - -/** - * @author cwardgar - * @since 2015/09/21 - */ -class PointIteratorFilteredSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(PointIteratorFilteredSpec) - - def "space and time filter"() { - setup: "feature dataset" - FeatureDatasetPoint fdPoint = PointTestUtil.openPointDataset("pointsToFilter.ncml") - - and: "bouding box" - double latMin = +10.0 - double latMax = +50.0 - double lonMin = -60.0 - double lonMax = +10.0 - LatLonRect filter_bb = new LatLonRect( - LatLonPoint.create(latMin, lonMin), LatLonPoint.create(latMax, lonMax)) - - and: "time range" - CalendarDateUnit calDateUnit = CalendarDateUnit.of("standard", "days since 1970-01-01 00:00:00") - CalendarDate start = calDateUnit.makeCalendarDate(20) - CalendarDate end = calDateUnit.makeCalendarDate(130) - CalendarDateRange filter_date = CalendarDateRange.of(start, end) - - and: "filtered point iterator" - PointFeatureCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint) - PointFeatureIterator pointIterOrig = flattenedDatasetCol.getPointFeatureIterator() - PointFeatureIterator pointIterFiltered = new PointIteratorFiltered(pointIterOrig, filter_bb, filter_date) - - expect: - getIdsOfPoints(pointIterFiltered) == ['BBB', 'EEE'] - - when: "we call next() when there are no more elements" - pointIterFiltered.next() - then: "an exception is thrown" - NoSuchElementException e = thrown() - e.message == 'This iterator has no more elements.' - - cleanup: - pointIterFiltered?.close() - fdPoint?.close() - } - - def getIdsOfPoints(PointFeatureIterator iter) { - def ids = [] - while (iter.hasNext()) { - iter.hasNext(); // Test idempotency. This call should have no effect. 
- ids << getIdOfPoint(iter.next()) - } - return ids - } - - private static String getIdOfPoint(PointFeature pointFeat) throws IOException { - StructureData data = pointFeat.getFeatureData() - Array memberArray = data.getArray("id"); - assert memberArray instanceof ArrayObject.D0 - - ArrayObject.D0 memberArrayObject = memberArray as ArrayObject.D0 - return memberArrayObject.get() as String - } -} diff --git a/cdm/core/src/test/groovy/ucar/nc2/ft2/coverage/HorizCoordSysGisTextBoundarySpec.groovy b/cdm/core/src/test/groovy/ucar/nc2/ft2/coverage/HorizCoordSysGisTextBoundarySpec.groovy deleted file mode 100644 index fc8ee9977a..0000000000 --- a/cdm/core/src/test/groovy/ucar/nc2/ft2/coverage/HorizCoordSysGisTextBoundarySpec.groovy +++ /dev/null @@ -1,67 +0,0 @@ -package ucar.nc2.ft2.coverage - -import spock.lang.Specification - -/** - * Tests BoundariesAsGisText - * - * @author cwardgar - * @since 2018-03-13 - */ -class HorizCoordSysGisTextBoundarySpec extends Specification { - // Builds on HorizCoordSysCrossSeamBoundarySpec."calcConnectedLatLonBoundaryPoints(2, 3) - lat/lon 1D"() - def "getLatLonBoundaryAsWKT(2, 3) - lat/lon 1D"() { - setup: "get the HorizCoordSys of the dataset" - HorizCoordSys horizCoordSys = getHorizCoordSysOfDataset("crossSeamLatLon1D.ncml") - - and: "get actual WKT" - String actualWKT = horizCoordSys.getLatLonBoundaryAsWKT(2, 3) - - and: "declare expected WKT" - String expectedWKT = "POLYGON((" + - "130.000 0.000, 170.000 0.000, 210.000 0.000, " + // Bottom edge - "230.000 0.000, 230.000 30.000, " + // Right edge - "230.000 50.000, 190.000 50.000, 150.000 50.000, " + // Top edge - "130.000 50.000, 130.000 20.000" + // Left edge - "))" - - expect: "expected equals actual" - actualWKT == expectedWKT - println actualWKT - } - - // Builds on HorizCoordSysCrossSeamBoundarySpec."calcConnectedLatLonBoundaryPoints(2, 2) - lat/lon 2D"() - def "getLatLonBoundaryAsGeoJSON(2, 2) - lat/lon 2D"() { - setup: "get the HorizCoordSys of the dataset" - HorizCoordSys horizCoordSys = getHorizCoordSysOfDataset("crossSeamLatLon2D.ncml") - - and: "get actual GeoJSON" - String actualGeoJSON = horizCoordSys.getLatLonBoundaryAsGeoJSON(2, 2) - - and: "declare expected GeoJSON" - String expectedGeoJSON = "{ 'type': 'Polygon', 'coordinates': [ [ " + - "[-169.527, 44.874], [-145.799, 58.685], " + // Bottom edge - "[-106.007, 69.750], [-162.839, 82.356], " + // Right edge - "[-252.973, 85.132], [-221.068, 66.429], " + // Top edge - "[-206.411, 48.271], [-188.523, 47.761]" + // Left edge - " ] ] }" - - expect: "expected equals actual" - actualGeoJSON == expectedGeoJSON - println actualGeoJSON - } - - private HorizCoordSys getHorizCoordSysOfDataset(String resourceName) { - File file = new File(getClass().getResource(resourceName).toURI()) - - CoverageDatasetFactory.open(file.absolutePath).withCloseable { FeatureDatasetCoverage featDsetCov -> - // Assert that featDsetCov was opened without failure and it contains 1 CoverageCollection. - assert featDsetCov != null - assert featDsetCov.getCoverageCollections().size() == 1 - - // Return HorizCoordSys from single CoverageCollection. 
- CoverageCollection covColl = featDsetCov.getCoverageCollections().get(0) - return covColl.getHorizCoordSys() - } - } -} diff --git a/cdm/core/src/test/groovy/ucar/nc2/util/cache/ReacquireClosedDatasetSpec.groovy b/cdm/core/src/test/groovy/ucar/nc2/util/cache/ReacquireClosedDatasetSpec.groovy deleted file mode 100644 index 240c5f88cc..0000000000 --- a/cdm/core/src/test/groovy/ucar/nc2/util/cache/ReacquireClosedDatasetSpec.groovy +++ /dev/null @@ -1,49 +0,0 @@ -package ucar.nc2.util.cache - -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification -import ucar.nc2.dataset.DatasetUrl -import ucar.nc2.dataset.NetcdfDataset -import ucar.unidata.util.test.TestDir - -/** - * Tests caching behavior when datasets are closed and then reacquired. - * - * @author cwardgar - * @since 2016-01-02 - */ -class ReacquireClosedDatasetSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(ReacquireClosedDatasetSpec) - - def setupSpec() { - // All datasets, once opened, will be added to this cache. Config values copied from CdmInit. - NetcdfDataset.initNetcdfFileCache(100, 150, 12 * 60); - } - - def cleanupSpec() { - // Undo global changes we made in setupSpec() so that they do not affect subsequent test classes. - NetcdfDataset.shutdown(); - } - - def "reacquire"() { - setup: 'location' - String location = TestDir.cdmLocalTestDataDir + "jan.nc" - - when: 'Acquire and close dataset 4 times' - (1..4).each { - NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(location), true, null).close() - } - - and: 'Query cache stats' - Formatter formatter = new Formatter() - NetcdfDataset.netcdfFileCache.showStats(formatter) - - then: 'The cache will have recorded 1 miss (1st trial) and 3 hits (subsequent trials)' - // This is kludgy, but FileCache doesn't provide getHits() or getMisses() methods. - formatter.toString().trim() ==~ /hits= 3 miss= 1 nfiles= \d+ elems= \d+/ - - // Prior to 2016-03-09 bug fix in AbstractIOServiceProvider.getLastModified(), - // this would record 0 hits and 4 misses. - } -} diff --git a/cdm/core/src/test/java/ucar/nc2/ft/point/TestFlattenedDatasetPointCollection.java b/cdm/core/src/test/java/ucar/nc2/ft/point/TestFlattenedDatasetPointCollection.java new file mode 100644 index 0000000000..fbf4d1a68e --- /dev/null +++ b/cdm/core/src/test/java/ucar/nc2/ft/point/TestFlattenedDatasetPointCollection.java @@ -0,0 +1,190 @@ +package ucar.nc2.ft.point; + +import static org.mockito.Mockito.mock; +import static com.google.common.truth.Truth.assertThat; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.junit.Before; +import org.junit.Test; +import ucar.nc2.constants.FeatureType; +import ucar.nc2.ft.DsgFeatureCollection; +import ucar.nc2.ft.FeatureDatasetPoint; +import ucar.nc2.ft.PointFeature; +import ucar.nc2.ft.PointFeatureCC; +import ucar.nc2.ft.PointFeatureCCC; +import ucar.nc2.ft.PointFeatureCollection; +import ucar.nc2.ft.PointFeatureIterator; +import ucar.nc2.time.CalendarDateRange; +import ucar.nc2.time.CalendarDateUnit; +import ucar.unidata.geoloc.EarthLocation; +import ucar.unidata.geoloc.LatLonPoint; +import ucar.unidata.geoloc.LatLonRect; + +public class TestFlattenedDatasetPointCollection { + + // FDP used in all feature methods. Its getPointFeatureCollectionList() method will be stubbed to return + // different collections per test. 
+  FeatureDatasetPoint fdPoint = mock(FeatureDatasetPoint.class);
+
+  PointFeature pf1, pf2, pf3, pf4, pf5, pf6, pf7, pf8, pf9;
+
+  @Before
+  public void setup() {
+    // create point features
+    CalendarDateUnit dateUnit = CalendarDateUnit.of(null, "days since 1970-01-01 00:00:00");
+    DsgFeatureCollection dummyDsg = new SimplePointFeatureCollection("dummy", dateUnit, "m");
+
+    pf1 = makePointFeat(dummyDsg, -75, -70, 630, 23, dateUnit);
+    pf2 = makePointFeat(dummyDsg, -60, -40, 94, 51, dateUnit);
+    pf3 = makePointFeat(dummyDsg, -45, -10, 1760, 88, dateUnit);
+    pf4 = makePointFeat(dummyDsg, -85, 20, 18940, 120, dateUnit);
+    pf5 = makePointFeat(dummyDsg, 0, 50, 26600, 150, dateUnit);
+    pf6 = makePointFeat(dummyDsg, 85, 80, 52800, 180, dateUnit);
+    pf7 = makePointFeat(dummyDsg, 15, 110, 1894, 200, dateUnit);
+    pf8 = makePointFeat(dummyDsg, 30, 140, 266, 300, dateUnit);
+    pf9 = makePointFeat(dummyDsg, 45, 170, 5280, 400, dateUnit);
+  }
+
+  private static PointFeature makePointFeat(DsgFeatureCollection dsg, double lat, double lon, double alt, double time,
+      CalendarDateUnit dateUnit) {
+    EarthLocation earthLoc = EarthLocation.create(lat, lon, alt);
+
+    // Pass null StructureData; we only care about the metadata for these tests.
+    return new SimplePointFeature(dsg, earthLoc, time, time, dateUnit, null);
+  }
+
+  @Test
+  public void shouldHandleEmptyFeatureDatasetPoint() {
+    when(fdPoint.getPointFeatureCollectionList()).thenReturn(Collections.emptyList());
+
+    FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint);
+
+    assertThat(flattenedDatasetCol.timeUnit.getUdUnit()).isEqualTo(CalendarDateUnit.unixDateUnit.getUdUnit());
+    assertThat(flattenedDatasetCol.altUnits).isNull();
+
+    PointFeatureIterator flattenedDatasetIter = flattenedDatasetCol.getPointFeatureIterator();
+
+    assertThat(flattenedDatasetIter.hasNext()).isFalse();
+    assertThat(flattenedDatasetIter.next()).isNull();
+  }
+
+  @Test
+  public void shouldReturnMetadataFromFirstCollection() {
+    CalendarDateUnit calDateUnitAlpha = CalendarDateUnit.of(null, "d since 1970-01-01 00:00:00");
+    CalendarDateUnit calDateUnitBeta = CalendarDateUnit.of(null, "day since 1970-01-01 00:00:00");
+    CalendarDateUnit dateUnitGamma = CalendarDateUnit.of(null, "days since 1970-01-01 00:00:00");
+
+    PointFeatureCollection pointFeatColAlpha = new SimplePointFeatureCollection("Alpha", calDateUnitAlpha, "yard");
+    PointFeatureCollection pointFeatColBeta = new SimplePointFeatureCollection("Beta", calDateUnitBeta, "mm");
+    PointFeatureCollection pointFeatColGamma = new SimplePointFeatureCollection("Gamma", dateUnitGamma, "feet");
+
+    when(fdPoint.getPointFeatureCollectionList())
+        .thenReturn(Arrays.asList(pointFeatColAlpha, pointFeatColBeta, pointFeatColGamma));
+
+    FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint);
+
+    assertThat(flattenedDatasetCol.timeUnit).isEqualTo(pointFeatColAlpha.getTimeUnit());
+    assertThat(flattenedDatasetCol.altUnits).isEqualTo(pointFeatColAlpha.getAltUnits());
+  }
+
+  @Test
+  public void shouldCreateEmptyInstances() {
+    // create an empty instance of each of the DsgFeatureCollection types
+    PointFeatureCollection emptyC = new SimplePointFeatureCollection("emptyC", null, "m");
+    PointFeatureCC emptyCC = new SimplePointFeatureCC("emptyCC", null, "y", FeatureType.POINT);
+    PointFeatureCCC emptyCCC = new SimplePointFeatureCCC("emptyCCC", null, "in", FeatureType.POINT);
+
+    // create a non-empty PointFeatureCC that contains an empty PointFeatureCollection
+    SimplePointFeatureCC nonEmptyCC = new SimplePointFeatureCC("nonEmptyCC", null, "y", FeatureType.POINT);
+    nonEmptyCC.add(emptyC);
+
+    // create a non-empty PointFeatureCCC that contains both an empty and non-empty PointFeatureCC
+    SimplePointFeatureCCC nonEmptyCCC = new SimplePointFeatureCCC("nonEmptyCCC", null, "in", FeatureType.POINT);
+    nonEmptyCCC.add(emptyCC);
+    nonEmptyCCC.add(nonEmptyCC);
+
+    when(fdPoint.getPointFeatureCollectionList())
+        .thenReturn(Arrays.asList(emptyC, emptyCC, emptyCCC, nonEmptyCC, nonEmptyCCC));
+
+    FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint);
+
+    // collection contains no PointFeatures
+    PointFeatureIterator flattenedDatasetIter = flattenedDatasetCol.getPointFeatureIterator();
+    assertThat(flattenedDatasetIter.hasNext()).isFalse();
+  }
+
+  @Test
+  public void shouldCreateMultipleTypes() {
+    SimplePointFeatureCollection pfc1 = new SimplePointFeatureCollection("pfc1", null, "m");
+    pfc1.add(pf1);
+
+    SimplePointFeatureCollection pfc2 = new SimplePointFeatureCollection("pfc2", null, "m");
+    pfc2.add(pf2);
+    pfc2.add(pf3);
+
+    SimplePointFeatureCollection pfc3 = new SimplePointFeatureCollection("pfc3", null, "m");
+
+    SimplePointFeatureCollection pfc4 = new SimplePointFeatureCollection("pfc4", null, "m");
+    pfc4.add(pf4);
+
+    SimplePointFeatureCollection pfc5 = new SimplePointFeatureCollection("pfc5", null, "m");
+    pfc5.add(pf5);
+    pfc5.add(pf6);
+    pfc5.add(pf7);
+
+    SimplePointFeatureCollection pfc6 = new SimplePointFeatureCollection("pfc6", null, "m");
+    pfc6.add(pf8);
+
+    SimplePointFeatureCollection pfc7 = new SimplePointFeatureCollection("pfc7", null, "m");
+    pfc7.add(pf9);
+
+    SimplePointFeatureCC pfcc1 = new SimplePointFeatureCC("pfcc1", null, "m", FeatureType.POINT);
+    pfcc1.add(pfc1);
+    pfcc1.add(pfc2);
+
+    SimplePointFeatureCC pfcc2 = new SimplePointFeatureCC("pfcc2", null, "m", FeatureType.POINT);
+    pfcc2.add(pfc3);
+    pfcc2.add(pfc4);
+
+    SimplePointFeatureCC pfcc3 = new SimplePointFeatureCC("pfcc3", null, "m", FeatureType.POINT);
+    pfcc3.add(pfc6);
+    pfcc3.add(pfc7);
+
+    CalendarDateUnit dateUnit = CalendarDateUnit.of(null, "d since 1970-01-01 00:00:00");
+    SimplePointFeatureCCC pfccc = new SimplePointFeatureCCC("pfccc", dateUnit, "m", FeatureType.POINT);
+    pfccc.add(pfcc1);
+    pfccc.add(pfcc2);
+
+    // mock FeatureDatasetPoint to return 1 of each DsgFeatureCollection instance, then flatten it
+    when(fdPoint.getPointFeatureCollectionList()).thenReturn(Arrays.asList(pfccc, pfc5, pfcc3));
+    FlattenedDatasetPointCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint);
+
+    // before iterating over the collection, bounds are null
+    assertThat(flattenedDatasetCol.getBoundingBox()).isNull();
+    assertThat(flattenedDatasetCol.getCalendarDateRange()).isNull();
+
+    // get the iterator and enable bounds calculation
+    PointIteratorAbstract flattenedPointIter = (PointIteratorAbstract) flattenedDatasetCol.getPointFeatureIterator();
+    flattenedPointIter.setCalculateBounds(flattenedDatasetCol.info);
+
+    // iterate over the collection
+    List<PointFeature> actualPointFeats = new ArrayList<>();
+    flattenedPointIter.forEachRemaining(actualPointFeats::add);
+
+    // the 9 PointFeatures are returned in order
+    assertThat(actualPointFeats).isEqualTo(Arrays.asList(pf1, pf2, pf3, pf4, pf5, pf6, pf7, pf8, pf9));
+
+    // the bounds include all 9 PointFeatures
+    assertThat(flattenedDatasetCol.size()).isEqualTo(9);
+    assertThat(flattenedDatasetCol.getBoundingBox())
+        .isEqualTo(new LatLonRect(LatLonPoint.create(-85, -70), LatLonPoint.create(85, 170)));
+
+    CalendarDateUnit calDateUnit = flattenedDatasetCol.timeUnit;
+    assertThat(flattenedDatasetCol.getCalendarDateRange())
+        .isEqualTo(CalendarDateRange.of(calDateUnit.makeCalendarDate(23), calDateUnit.makeCalendarDate(400)));
+  }
+}
diff --git a/cdm/core/src/test/java/ucar/nc2/ft/point/TestPointIteratorFiltered.java b/cdm/core/src/test/java/ucar/nc2/ft/point/TestPointIteratorFiltered.java
new file mode 100644
index 0000000000..9a356b115f
--- /dev/null
+++ b/cdm/core/src/test/java/ucar/nc2/ft/point/TestPointIteratorFiltered.java
@@ -0,0 +1,74 @@
+package ucar.nc2.ft.point;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.NoSuchElementException;
+import org.junit.Assert;
+import org.junit.Test;
+import ucar.ma2.Array;
+import ucar.ma2.ArrayObject;
+import ucar.ma2.StructureData;
+import ucar.nc2.ft.FeatureDatasetPoint;
+import ucar.nc2.ft.NoFactoryFoundException;
+import ucar.nc2.ft.PointFeature;
+import ucar.nc2.ft.PointFeatureCollection;
+import ucar.nc2.ft.PointFeatureIterator;
+import ucar.nc2.time.CalendarDate;
+import ucar.nc2.time.CalendarDateRange;
+import ucar.nc2.time.CalendarDateUnit;
+import ucar.unidata.geoloc.LatLonPoint;
+import ucar.unidata.geoloc.LatLonRect;
+
+public class TestPointIteratorFiltered {
+
+  @Test
+  public void shouldFilterSpaceAndTime() throws IOException, URISyntaxException, NoFactoryFoundException {
+    try (FeatureDatasetPoint fdPoint = PointTestUtil.openPointDataset("pointsToFilter.ncml")) {
+
+      double latMin = 10.0;
+      double latMax = 50.0;
+      double lonMin = -60.0;
+      double lonMax = 10.0;
+      LatLonRect filterBB = new LatLonRect(LatLonPoint.create(latMin, lonMin), LatLonPoint.create(latMax, lonMax));
+
+      CalendarDateUnit calDateUnit = CalendarDateUnit.of("standard", "days since 1970-01-01 00:00:00");
+      CalendarDate start = calDateUnit.makeCalendarDate(20);
+      CalendarDate end = calDateUnit.makeCalendarDate(130);
+      CalendarDateRange filterDate = CalendarDateRange.of(start, end);
+
+      // filtered point iterator
+      PointFeatureCollection flattenedDatasetCol = new FlattenedDatasetPointCollection(fdPoint);
+      PointFeatureIterator pointIterOrig = flattenedDatasetCol.getPointFeatureIterator();
+      try (PointFeatureIterator pointIterFiltered = new PointIteratorFiltered(pointIterOrig, filterBB, filterDate)) {
+        assertThat(getIdsOfPoints(pointIterFiltered)).isEqualTo(Arrays.asList("BBB", "EEE"));
+
+        // we call next() when there are no more elements
+        NoSuchElementException e = Assert.assertThrows(NoSuchElementException.class, pointIterFiltered::next);
+        assertThat(e.getMessage()).isEqualTo("This iterator has no more elements.");
+      }
+    }
+  }
+
+  private static List<String> getIdsOfPoints(PointFeatureIterator iter) throws IOException {
+    List<String> ids = new ArrayList<>();
+    while (iter.hasNext()) {
+      iter.hasNext(); // Test idempotency. This call should have no effect.
+ ids.add(getIdOfPoint(iter.next())); + } + return ids; + } + + private static String getIdOfPoint(PointFeature pointFeat) throws IOException { + StructureData data = pointFeat.getFeatureData(); + Array memberArray = data.getArray("id"); + assertThat(memberArray).isInstanceOf(ArrayObject.D0.class); + + ArrayObject.D0 memberArrayObject = (ArrayObject.D0) memberArray; + return (String) memberArrayObject.get(); + } +} diff --git a/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSysGisTextBoundary.java b/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSysGisTextBoundary.java new file mode 100644 index 0000000000..c12c48ea43 --- /dev/null +++ b/cdm/core/src/test/java/ucar/nc2/ft2/coverage/TestHorizCoordSysGisTextBoundary.java @@ -0,0 +1,56 @@ +package ucar.nc2.ft2.coverage; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import org.junit.Test; + +public class TestHorizCoordSysGisTextBoundary { + + // Builds on HorizCoordSysCrossSeamBoundarySpec."calcConnectedLatLonBoundaryPoints(2, 3) - lat/lon 1D"() + @Test + public void shouldGetLatLonBoundaryAsWKT() throws IOException, URISyntaxException { + HorizCoordSys horizCoordSys = getHorizCoordSysOfDataset("crossSeamLatLon1D.ncml"); + String actualWKT = horizCoordSys.getLatLonBoundaryAsWKT(2, 3); + + String expectedWKT = "POLYGON((" + "130.000 0.000, 170.000 0.000, 210.000 0.000, " + // Bottom edge + "230.000 0.000, 230.000 30.000, " + // Right edge + "230.000 50.000, 190.000 50.000, 150.000 50.000, " + // Top edge + "130.000 50.000, 130.000 20.000" + // Left edge + "))"; + + assertThat(actualWKT).isEqualTo(expectedWKT); + } + + // Builds on HorizCoordSysCrossSeamBoundarySpec."calcConnectedLatLonBoundaryPoints(2, 2) - lat/lon 2D"() + @Test + public void shouldGetLatLonBoundaryAsGeoJSON() throws IOException, URISyntaxException { + HorizCoordSys horizCoordSys = getHorizCoordSysOfDataset("crossSeamLatLon2D.ncml"); + String actualGeoJSON = horizCoordSys.getLatLonBoundaryAsGeoJSON(2, 2); + + String expectedGeoJSON = "{ 'type': 'Polygon', 'coordinates': [ [ " + "[-169.527, 44.874], [-145.799, 58.685], " + // Bottom + // edge + "[-106.007, 69.750], [-162.839, 82.356], " + // Right edge + "[-252.973, 85.132], [-221.068, 66.429], " + // Top edge + "[-206.411, 48.271], [-188.523, 47.761]" + // Left edge + " ] ] }"; + + assertThat(actualGeoJSON).isEqualTo(expectedGeoJSON); + } + + private HorizCoordSys getHorizCoordSysOfDataset(String resourceName) throws IOException, URISyntaxException { + File file = new File(getClass().getResource(resourceName).toURI()); + + try (FeatureDatasetCoverage featDsetCov = CoverageDatasetFactory.open(file.getAbsolutePath())) { + // Assert that featDsetCov was opened without failure and it contains 1 CoverageCollection. + assertThat(featDsetCov).isNotNull(); + assertThat(featDsetCov.getCoverageCollections().size()).isEqualTo(1); + + // Return HorizCoordSys from single CoverageCollection. 
+ CoverageCollection covColl = featDsetCov.getCoverageCollections().get(0); + return covColl.getHorizCoordSys(); + } + } +} diff --git a/cdm/core/src/test/java/ucar/nc2/util/cache/TestReacquireClosedDataset.java b/cdm/core/src/test/java/ucar/nc2/util/cache/TestReacquireClosedDataset.java new file mode 100644 index 0000000000..2638ad6fee --- /dev/null +++ b/cdm/core/src/test/java/ucar/nc2/util/cache/TestReacquireClosedDataset.java @@ -0,0 +1,50 @@ +package ucar.nc2.util.cache; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.IOException; +import java.util.Formatter; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import ucar.nc2.dataset.DatasetUrl; +import ucar.nc2.dataset.NetcdfDataset; +import ucar.unidata.util.test.TestDir; + +/** + * Tests caching behavior when datasets are closed and then reacquired. + */ +public class TestReacquireClosedDataset { + + @BeforeClass + public static void setup() { + // All datasets, once opened, will be added to this cache. Config values copied from CdmInit. + NetcdfDataset.initNetcdfFileCache(100, 150, 12 * 60); + } + + @AfterClass + public static void cleanup() { + // Undo global changes we made in setup() so that they do not affect subsequent test classes. + NetcdfDataset.shutdown(); + } + + @Test + public void shouldReacquire() throws IOException { + String location = TestDir.cdmLocalTestDataDir + "jan.nc"; + + // Acquire and close dataset 4 times + for (int i = 0; i < 4; i++) { + NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(location), true, null).close(); + } + + Formatter formatter = new Formatter(); + NetcdfDataset.getNetcdfFileCache().showStats(formatter); + + // The cache will have recorded 1 miss (1st trial) and 3 hits (subsequent trials) + // This is kludgy, but FileCache doesn't provide getHits() or getMisses() methods. + assertThat(formatter.toString().trim()).contains("hits= 3 miss= 1"); + + // Prior to 2016-03-09 bug fix in AbstractIOServiceProvider.getLastModified(), + // this would record 0 hits and 4 misses. + } +} diff --git a/cdm/misc/build.gradle b/cdm/misc/build.gradle index 2e6f0cdf32..a1765e691d 100644 --- a/cdm/misc/build.gradle +++ b/cdm/misc/build.gradle @@ -5,8 +5,6 @@ apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" apply from: "$rootDir/gradle/any/protobuf.gradle" -apply plugin: 'groovy' // For Spock tests. - dependencies { api enforcedPlatform(project(':netcdf-java-platform')) testImplementation enforcedPlatform(project(':netcdf-java-testing-platform')) @@ -23,9 +21,6 @@ dependencies { testImplementation project(':cdm-test-utils') - testImplementation 'org.codehaus.groovy:groovy-all' // for spock - testImplementation 'org.spockframework:spock-core' - testImplementation 'commons-io:commons-io' testImplementation 'org.mockito:mockito-core' testImplementation 'com.google.truth:truth' diff --git a/cdm/radial/build.gradle b/cdm/radial/build.gradle index b769186ef3..6a57986bf8 100644 --- a/cdm/radial/build.gradle +++ b/cdm/radial/build.gradle @@ -4,8 +4,6 @@ ext.title = 'CDM radial library' apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" -apply plugin: 'groovy' // For Spock tests. 
- dependencies { api enforcedPlatform(project(':netcdf-java-platform')) testImplementation enforcedPlatform(project(':netcdf-java-testing-platform')) @@ -21,8 +19,6 @@ dependencies { testImplementation project(':cdm-test-utils') testImplementation project(':netcdf4') - testImplementation 'org.codehaus.groovy:groovy-all' // for spock - testImplementation 'org.spockframework:spock-core' testImplementation 'commons-io:commons-io' testImplementation 'org.mockito:mockito-core' testImplementation 'com.google.truth:truth' diff --git a/cdm/s3/build.gradle b/cdm/s3/build.gradle index d5f1c988b6..a4a108a170 100644 --- a/cdm/s3/build.gradle +++ b/cdm/s3/build.gradle @@ -4,8 +4,6 @@ ext.title = 'CDM S3 support library' apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" -apply plugin: 'groovy' // For Spock tests. - dependencies { api enforcedPlatform(project(':netcdf-java-platform')) testImplementation enforcedPlatform(project(':netcdf-java-testing-platform')) diff --git a/gradle/root/sonarqube.gradle b/gradle/root/sonarqube.gradle index 89d747b751..384ad087ed 100644 --- a/gradle/root/sonarqube.gradle +++ b/gradle/root/sonarqube.gradle @@ -58,7 +58,7 @@ gradle.projectsEvaluated { allprojects { sonarqube { properties { - properties['sonar.inclusions'] = '**/*.java, **/*.groovy' // Only scan Java and Groovy files. + properties['sonar.inclusions'] = '**/*.java' // Only scan Java files. properties['sonar.exclusions'] = '**/*Proto.java' // Don't analyze protobuf-generated code. // We're already reporting test failures and code coverage in Jenkins; we don't need to do it in Sonar diff --git a/legacy/build.gradle b/legacy/build.gradle index cc8255bf3f..9b9a3f77bf 100644 --- a/legacy/build.gradle +++ b/legacy/build.gradle @@ -5,8 +5,6 @@ ext.url = 'https://www.unidata.ucar.edu/software/tds/v4.6/TDS.html' apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" -apply plugin: 'groovy' // For Spock tests. - dependencies { api enforcedPlatform(project(':netcdf-java-platform')) testImplementation enforcedPlatform(project(':netcdf-java-testing-platform')) @@ -24,10 +22,5 @@ dependencies { compile 'com.amazonaws:aws-java-sdk-s3' // For CrawlableDatasetAmazonS3. testImplementation project(':cdm-test-utils') - // These are all for Spock. - testImplementation 'org.spockframework:spock-core' - testImplementation 'org.codehaus.groovy:groovy-all' testRuntimeOnly 'ch.qos.logback:logback-classic' - testRuntimeOnly 'cglib:cglib-nodep' - testRuntimeOnly 'org.objenesis:objenesis' } diff --git a/legacy/src/test/groovy/thredds/crawlabledataset/s3/CachingThreddsS3ClientSpec.groovy b/legacy/src/test/groovy/thredds/crawlabledataset/s3/CachingThreddsS3ClientSpec.groovy deleted file mode 100644 index 166cff2cc9..0000000000 --- a/legacy/src/test/groovy/thredds/crawlabledataset/s3/CachingThreddsS3ClientSpec.groovy +++ /dev/null @@ -1,179 +0,0 @@ -package thredds.crawlabledataset.s3 - -import com.amazonaws.services.s3.model.ObjectListing -import com.amazonaws.services.s3.model.ObjectMetadata -import com.google.common.base.Optional -import com.google.common.cache.RemovalListener -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification - -import java.nio.file.Files - -/** - * Tests the caching behavior of CachingThreddsS3Client. 
- * - * @author cwardgar - * @since 2015/08/27 - */ -class CachingThreddsS3ClientSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(CachingThreddsS3ClientSpec) - - // create a CachingThreddsS3Client that wraps our mock ThreddsS3Client - ThreddsS3Client mockThreddsS3Client = Mock(ThreddsS3Client) - RemovalListener> mockRemovalListener = Mock(RemovalListener) - ThreddsS3Client cachingThreddsS3Client = new CachingThreddsS3Client(mockThreddsS3Client, mockRemovalListener) - - def "getObjectMetadata"() { - setup: "create URI and mock return value" - S3URI s3uri = new S3URI("s3://bucket/existing-key") - ObjectMetadata mockObjectData = Mock(ObjectMetadata) - - when: "caching client's getObjectMetadata() is called twice" - cachingThreddsS3Client.getObjectMetadata(s3uri) - cachingThreddsS3Client.getObjectMetadata(s3uri) - - then: "mocking client's getObjectMetadata() is called exactly once. It is stubbed to return mockObjectData" - 1 * mockThreddsS3Client.getObjectMetadata(s3uri) >> mockObjectData - - and: "caching client is returning mockObjectData" - cachingThreddsS3Client.getObjectMetadata(s3uri) is mockObjectData - } - - def "listObjects"() { - setup: "create URI and mock return value" - S3URI s3uri = new S3URI("s3://bucket/existing-key") - ObjectListing mockObjectListing = Mock(ObjectListing) - - when: "caching client's listObjects() is called twice" - cachingThreddsS3Client.listObjects(s3uri) - cachingThreddsS3Client.listObjects(s3uri) - - then: "mocking client's listObjects() is called exactly once. It is stubbed to return mockObjectListing" - 1 * mockThreddsS3Client.listObjects(s3uri) >> mockObjectListing - - and: "caching client is returning mockObjectListing" - cachingThreddsS3Client.listObjects(s3uri) is mockObjectListing - } - - def "saveObjectToFile - missing key"() { - setup: "create URI" - S3URI s3uri = new S3URI("s3://bucket/missing-key") - File file = s3uri.getTempFile(); - - when: "caching client's saveObjectToFile() is called twice" - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - - then: "mocking client's saveObjectToFile() is called exactly once. It is stubbed to return null" - 1 * mockThreddsS3Client.saveObjectToFile(s3uri, file) >> null - - and: "caching client is returning null" - cachingThreddsS3Client.saveObjectToFile(s3uri, file) == null - - cleanup: "delete temp file" - file?.delete() - } - - def "saveObjectToFile - redownloading cached file"() { - setup: "create URI and File" - S3URI s3uri = new S3URI("s3://bucket/dataset.nc") - File file = createTempFile s3uri - - when: "caching client's saveObjectToFile() is called twice" - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - - and: "the saved file is deleted" - Files.delete(file.toPath()) - - and: "caching client's saveObjectToFile() is called twice more" - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - cachingThreddsS3Client.saveObjectToFile(s3uri, file) - - then: "mocking client's saveObjectToFile() is called exactly twice. It is stubbed to return file" - 2 * mockThreddsS3Client.saveObjectToFile(s3uri, file) >> { // This is a closure that generates return values. - if (!file.exists()) { - // Before the 2nd call to this method, the file will have been deleted. We must re-create. - file.createNewFile() - } - file // Last statement is the (implicit) value of the closure. 
- } - - and: "entry for non-existent file was evicted" - 1 * mockRemovalListener.onRemoval({ it.getValue().get() == file }) - - and: "caching client is returning file" - cachingThreddsS3Client.saveObjectToFile(s3uri, file) is file - - cleanup: "delete temp file" - Files.delete(file.toPath()) - } - - def "saveObjectToFile - download object to 2 different files"() { - setup: "create URI and Files" - S3URI s3uri = new S3URI("s3://bucket/dataset.nc") - File file1 = File.createTempFile("file1", ".nc") - File file2 = File.createTempFile("file2", ".nc") - - when: "caching client's saveObjectToFile() is called once with file1 and once with file2" - cachingThreddsS3Client.saveObjectToFile(s3uri, file1) - cachingThreddsS3Client.saveObjectToFile(s3uri, file2) - - then: "mocking client's saveObjectToFile() is called exactly once. It's stubbed to return file1" - 1 * mockThreddsS3Client.saveObjectToFile(s3uri, file1) >> file1 - - and: "old entry for s3uri was evicted" - 1 * mockRemovalListener.onRemoval({ it.getKey() == s3uri && it.getValue().get() == file1 }) - - and: "caching client is returning file2" - cachingThreddsS3Client.saveObjectToFile(s3uri, file2) is file2 - - cleanup: "delete temp files" - Files.delete(file1.toPath()) - Files.delete(file2.toPath()) - } - - def "clear"() { - setup: "create caching client that uses real RemovalListener" - cachingThreddsS3Client = new CachingThreddsS3Client(mockThreddsS3Client) // Deletes files evicted from cache. - - and: "create URIs" - S3URI s3uri1 = new S3URI("s3://bucket/dataset1.nc") - S3URI s3uri2 = new S3URI("s3://bucket/dataset2.nc") - S3URI s3uri3 = new S3URI("s3://bucket/dataset3.nc") - - and: "create temp files" - File file1 = createTempFile s3uri1 - File file2 = createTempFile s3uri2 - File file3 = createTempFile s3uri3 - - and: "mocking client's saveObjectToFile() is stubbed to return file1, file2, and file3 in order" - mockThreddsS3Client.saveObjectToFile(_, _) >>> [file1, file2, file3] - - expect: "save objects to files, adding them to cache" - cachingThreddsS3Client.saveObjectToFile(s3uri1, file1) == file1 - cachingThreddsS3Client.saveObjectToFile(s3uri2, file2) == file2 - cachingThreddsS3Client.saveObjectToFile(s3uri3, file3) == file3 - - and: "files exist" - file1.exists() - file2.exists() - file3.exists() - - when: "cache is cleared" - cachingThreddsS3Client.clear() - - then: "files no longer exist" - !file1.exists() - !file2.exists() - !file3.exists() - } - - File createTempFile(S3URI s3URI) { - File file = s3URI.tempFile - file.parentFile.mkdirs() - file.createNewFile() - file - } -} diff --git a/legacy/src/test/groovy/thredds/crawlabledataset/s3/CrawlableDatasetAmazonS3Spec.groovy b/legacy/src/test/groovy/thredds/crawlabledataset/s3/CrawlableDatasetAmazonS3Spec.groovy deleted file mode 100644 index 54d217d284..0000000000 --- a/legacy/src/test/groovy/thredds/crawlabledataset/s3/CrawlableDatasetAmazonS3Spec.groovy +++ /dev/null @@ -1,278 +0,0 @@ -package thredds.crawlabledataset.s3 - -import com.amazonaws.services.s3.model.ObjectListing -import com.amazonaws.services.s3.model.ObjectMetadata -import com.amazonaws.services.s3.model.S3ObjectSummary -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Shared -import spock.lang.Specification -import thredds.crawlabledataset.CrawlableDataset - -/** - * Tests CrawlableDatasetAmazonS3 - * - * @author cwardgar - * @since 2015/08/14 - */ -class CrawlableDatasetAmazonS3Spec extends Specification { - private static final Logger logger = 
LoggerFactory.getLogger(CrawlableDatasetAmazonS3Spec) - - // Shared resources are initialized in setupSpec() - @Shared S3URI parentDirUri, childDir1Uri, childDir2Uri, dataset1Uri, dataset2Uri - @Shared long dataset1Length, dataset2Length - @Shared Date dataset1LastModified, dataset2LastModified - - @Shared ObjectListing parentDirObjectListing, childDir1ObjectListing, childDir2ObjectListing - @Shared ObjectMetadata dataset1ObjectMetadata, dataset2ObjectMetadata - - // create mock client that returns a listing with two objects and two directories - def setupSpec() { - parentDirUri = new S3URI("s3://bucket/parentDir") - childDir1Uri = new S3URI("s3://bucket/parentDir/childDir1") - childDir2Uri = new S3URI("s3://bucket/parentDir/childDir2") - dataset1Uri = new S3URI("s3://bucket/parentDir/childDir1/dataset1.nc") - dataset2Uri = new S3URI("s3://bucket/parentDir/childDir2/dataset2.nc") - - dataset1Length = 1337 - dataset2Length = 42 - - dataset1LastModified = new Date(1941, 11, 7) - dataset2LastModified = new Date(1952, 2, 11) - - /* - * These are return values from a mocked ThreddsS3Client. Together, they describe the following file collection: - * parentDir/ - * childDir1/ - * dataset1.nc - * childDir2/ - * dataset2.nc - */ - - // To be returned by: threddsS3Client.listObjects(parentDirUri) - parentDirObjectListing = Mock(ObjectListing) { - getObjectSummaries() >> [] - getCommonPrefixes() >> [childDir1Uri.key, childDir2Uri.key] - } - - // To be returned by: threddsS3Client.listObjects(childDir1Uri) - childDir1ObjectListing = Mock(ObjectListing) { - getObjectSummaries() >> [ - Mock(S3ObjectSummary) { - getBucketName() >> dataset1Uri.bucket - getKey() >> dataset1Uri.key - getSize() >> dataset1Length - getLastModified() >> dataset1LastModified - } - ] - getCommonPrefixes() >> [] - } - - // To be returned by: threddsS3Client.listObjects(childDir2Uri) - childDir2ObjectListing = Mock(ObjectListing) { - getObjectSummaries() >> [ - Mock(S3ObjectSummary) { - getBucketName() >> dataset2Uri.bucket - getKey() >> dataset2Uri.key - getSize() >> dataset2Length - getLastModified() >> dataset2LastModified - } - ] - getCommonPrefixes() >> [] - } - - // To be returned by: threddsS3Client.getObjectMetadata(dataset1Uri) - dataset1ObjectMetadata = Mock(ObjectMetadata) { - getContentLength() >> dataset1Length - getLastModified() >> dataset1LastModified - } - - // To be returned by: threddsS3Client.getObjectMetadata(dataset2Uri) - dataset2ObjectMetadata = Mock(ObjectMetadata) { - getContentLength() >> dataset2Length - getLastModified() >> dataset2LastModified - } - - // The default client is a mock ThreddsS3Client that returns default values from all its methods. - CrawlableDatasetAmazonS3.defaultThreddsS3Client = Mock(ThreddsS3Client) - } - - // Clear the object summary cache before each feature method runs. - def setup() { - CrawlableDatasetAmazonS3.clearCache() - } - - - // These getter methods rely heavily on S3URI functionality, which is already tested thoroughly in S3URISpec. 
- def "getPath"() { - expect: - new CrawlableDatasetAmazonS3("s3://bucket/some/key").path == "s3://bucket/some/key" - } - - def "getName"() { - expect: - new CrawlableDatasetAmazonS3("s3://bucket/some/other-key").name == "other-key" - } - - def "getParentDataset"() { - setup: "use defaultThreddsS3Client" - CrawlableDatasetAmazonS3 parent = new CrawlableDatasetAmazonS3("s3://bucket/one/two") - CrawlableDatasetAmazonS3 child = new CrawlableDatasetAmazonS3("s3://bucket/one/two/three") - - expect: - child.getParentDataset() == parent - } - - def "getDescendant"() { - setup: "use defaultThreddsS3Client" - CrawlableDatasetAmazonS3 parent = new CrawlableDatasetAmazonS3("s3://bucket/one/two") - CrawlableDatasetAmazonS3 child = new CrawlableDatasetAmazonS3("s3://bucket/one/two/three") - - expect: - parent.getDescendant("three") == child - } - - def "exists"() { - setup: - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) { - 1 * listObjects(childDir1Uri) >> childDir1ObjectListing - 1 * getObjectMetadata(dataset1Uri) >> dataset1ObjectMetadata - } - - expect: - new CrawlableDatasetAmazonS3(childDir1Uri, null, threddsS3Client).exists() - new CrawlableDatasetAmazonS3(dataset1Uri, null, threddsS3Client).exists() - !new CrawlableDatasetAmazonS3(parentDirUri.getChild("non-existent-dataset.nc"), null, threddsS3Client).exists() - } - - def "isCollection"() { - setup: - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) { - 1 * listObjects(parentDirUri) >> parentDirObjectListing - 1 * listObjects(childDir2Uri) >> childDir2ObjectListing - } - - expect: - new CrawlableDatasetAmazonS3(parentDirUri, null, threddsS3Client).isCollection() - new CrawlableDatasetAmazonS3(childDir2Uri, null, threddsS3Client).isCollection() - !new CrawlableDatasetAmazonS3(dataset1Uri, null, threddsS3Client).isCollection() - } - - def "listDatasets success"() { - setup: - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) { - 1 * listObjects(parentDirUri) >> parentDirObjectListing - 1 * listObjects(childDir1Uri) >> childDir1ObjectListing - 1 * listObjects(childDir2Uri) >> childDir2ObjectListing - } - - and: "s3://bucket/parentDir" - CrawlableDatasetAmazonS3 parentDir = new CrawlableDatasetAmazonS3(parentDirUri, null, threddsS3Client) - - when: - List childDirs = parentDir.listDatasets(); - - then: "there are two datasets" - childDirs.size() == 2 - - and: "s3://bucket/parentDir/childDir1" - CrawlableDatasetAmazonS3 childDir1 = childDirs[0] as CrawlableDatasetAmazonS3 - childDir1.s3URI == childDir1Uri - - and: "s3://bucket/parentDir/childDir2" - CrawlableDatasetAmazonS3 childDir2 = childDirs[1] as CrawlableDatasetAmazonS3 - childDir2.s3URI == childDir2Uri - - when: - List childDir1Datasets = childDir1.listDatasets() - - then: "s3://bucket/parentDir/childDir1/dataset1.nc" - childDir1Datasets.size() == 1 - (childDir1Datasets[0] as CrawlableDatasetAmazonS3).s3URI == dataset1Uri - - when: - List childDir2Datasets = childDir2.listDatasets() - - then: "s3://bucket/parentDir/childDir1/dataset2.nc" - childDir2Datasets.size() == 1 - (childDir2Datasets[0] as CrawlableDatasetAmazonS3).s3URI == dataset2Uri - } - - def "listDatasets failure"() { - setup: "use defaultThreddsS3Client" - CrawlableDataset dataset = new CrawlableDatasetAmazonS3(dataset1Uri) - - when: "listObjects(dataset1Uri) will return null" - dataset.listDatasets() - - then: - IllegalStateException e = thrown() - e.message == "'$dataset1Uri' is not a collection dataset." 
- } - - def "length and lastModified success (missing cache)"() { - setup: - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) - CrawlableDataset dataset = new CrawlableDatasetAmazonS3(dataset1Uri, null, threddsS3Client) - - when: "call length() and lastModified() without first doing listDatasets() on parent" - def length = dataset.length() - def lastModified = dataset.lastModified() - - then: "get the metadata directly from threddsS3Client because it wasn't in the cache" - 2 * threddsS3Client.getObjectMetadata(dataset1Uri) >> dataset1ObjectMetadata - - and: "length() is returning the stubbed value" - length == dataset1ObjectMetadata.contentLength - - and: "lastModified() is returning the stubbed value" - lastModified == dataset1ObjectMetadata.lastModified - } - - def "length and lastModified success (hitting cache)"() { - setup: - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) - CrawlableDataset dataset = new CrawlableDatasetAmazonS3(dataset2Uri, null, threddsS3Client) - - when: "we listDatasets() on the parent directory, filling the cache with object summaries" - dataset.getParentDataset().listDatasets() - - and: "call length() and lastModified() with object summaries in the cache" - def length = dataset.length() - def lastModified = dataset.lastModified() - - then: "listObjects() is called once and getObjectMetadata never gets called" - 1 * threddsS3Client.listObjects(childDir2Uri) >> childDir2ObjectListing - 0 * threddsS3Client.getObjectMetadata(_) - - and: "length() is returning the stubbed value" - length == dataset2ObjectMetadata.contentLength - - and: "lastModified() is returning the stubbed value" - lastModified == dataset2ObjectMetadata.lastModified - } - - def "length and lastModified failure (missing cache)"() { - setup: - S3URI nonExistentUri = parentDirUri.getChild("non-existent-dataset.nc") - ThreddsS3Client threddsS3Client = Mock(ThreddsS3Client) - CrawlableDataset dataset = new CrawlableDatasetAmazonS3(nonExistentUri, null, threddsS3Client) - - when: "we listDatasets() on the parent directory, there will be no summary for nonExistentUri" - dataset.getParentDataset().listDatasets() - - and: "call length() and lastModified() with object summaries in the cache" - def length = dataset.length() - def lastModified = dataset.lastModified() - - then: "getObjectMetadata() will get called due to cache misses" - 1 * threddsS3Client.listObjects(nonExistentUri.parent) >> parentDirObjectListing - 2 * threddsS3Client.getObjectMetadata(nonExistentUri) >> null - - and: "length() is returning the missing value: 0" - length == 0L - - and: "lastModified() is returning the missing value: null" - lastModified == null - } -} diff --git a/legacy/src/test/groovy/thredds/crawlabledataset/s3/S3URISpec.groovy b/legacy/src/test/groovy/thredds/crawlabledataset/s3/S3URISpec.groovy deleted file mode 100644 index 2a4dace4e5..0000000000 --- a/legacy/src/test/groovy/thredds/crawlabledataset/s3/S3URISpec.groovy +++ /dev/null @@ -1,133 +0,0 @@ -package thredds.crawlabledataset.s3 - -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification - -/** - * Tests S3URI. 
- * - * @author cwardgar - * @since 2015/08/26 - */ -class S3URISpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(S3URISpec) - - def "constructor throws exception for invalid arguments"() { - when: "no S3 prefix" - new S3URI("/path/to/object") - - then: - IllegalArgumentException e1 = thrown() - e1.message == "S3 URI '/path/to/object' does not start with the expected prefix 's3://'." - - when: "null bucket" - new S3URI(null, "foo") - - then: - NullPointerException e2 = thrown() - e2.message == "Bucket must be non-null." - - when: "bucket name length less than 3" - new S3URI("be", "kind") - - then: - IllegalArgumentException e3 = thrown() - e3.message == "Bucket name 'be' must be at least 3 characters." - - when: "key can't be empty string, 1-arg ctor" - new S3URI("s3://bucket/") - - then: - IllegalArgumentException e4 = thrown() - e4.message == "Key may not be the empty string." - - when: "key can't be empty string, 2-arg ctor" - new S3URI("bucket", "") - - then: - IllegalArgumentException e5 = thrown() - e5.message == "Key may not be the empty string." - - when: "key can't contain consecutive delimiters" - new S3URI("s3://bucket/one//two/three/") - - then: - IllegalArgumentException e6 = thrown() - e6.message == "Key 'one//two/three/' contains consecutive delimiters." - } - - def "constructor allows null keys"() { - expect: "1-arg ctor" - new S3URI("s3://bucket").getKey() == null - - and: "2-arg ctor" - new S3URI("bucket", null).getKey() == null - } - - def "constructor removes trailing delimiter"() { - expect: - new S3URI("s3://bucket/key/").getKey() == "key" - new S3URI("bucket", "some/other/key/").getKey() == "some/other/key" - } - - def "getKeyWithTrailingDelimiter"() { - expect: - new S3URI("s3://bucket/key").getKeyWithTrailingDelimiter() == "key/" - new S3URI("bucket", "key/").getKeyWithTrailingDelimiter() == "key/" - new S3URI("s3://bucket").getKeyWithTrailingDelimiter() == null - } - - def "getBaseName"() { - expect: - new S3URI("s3://bucket/some/very/long/key/name/yep!").getBaseName() == "yep!" - new S3URI("bucket", null).getBaseName() == null - } - - def "getParent"() { - expect: - new S3URI("s3://bucket/one/two/three").getParent() == new S3URI("s3://bucket/one/two") - new S3URI("s3://bucket/no-parent").getParent() == new S3URI("s3://bucket") - new S3URI("bucket", null).getParent() == null - } - - def "getChild"() { - expect: "success" - new S3URI("s3://dogs/husky").getChild("") == new S3URI("s3://dogs/husky") - new S3URI("s3://shire").getChild("frodo/baggins") == new S3URI("s3://shire/frodo/baggins") - new S3URI("s3://hobbits/frodo/sam").getChild("merry/pippin") == new S3URI("s3://hobbits/frodo/sam/merry/pippin") - - // Failures - when: "null relativePath" - new S3URI("s3://shire").getChild(null) - - then: - NullPointerException e1 = thrown() - e1.message == "relativePath must be non-null." - - when: "absolute path" - new S3URI("s3://shire/hobbiton").getChild("/rohan/edoras") - - then: - IllegalArgumentException e2 = thrown() - e2.message == "Path '/rohan/edoras' should be relative but begins with the delimiter string '/'." - - when: "consecutive delimiters" - new S3URI("s3://shire/hobbiton").getChild("nasty//taters") - - then: - IllegalArgumentException e4 = thrown() - e4.message == "Key 'hobbiton/nasty//taters' contains consecutive delimiters." 
- } - - def "toString test"() { - expect: - new S3URI("bucket", null).toString() == "s3://bucket" - new S3URI("bucket", "one/two").toString() == "s3://bucket/one/two" - new S3URI("s3://bucket/key/").toString() == "s3://bucket/key" - } - - // equals() and hashCode() methods are simple and were auto-generated by IntelliJ, so they are very unlikely to - // be correct. So, I'm not going to test them here. Besides, it's a pain in the ass to fully test the contract - // of those two. -} diff --git a/legacy/src/test/groovy/thredds/crawlabledataset/s3/ThreddsS3ClientImplSpec.groovy b/legacy/src/test/groovy/thredds/crawlabledataset/s3/ThreddsS3ClientImplSpec.groovy deleted file mode 100644 index 449aeb92f8..0000000000 --- a/legacy/src/test/groovy/thredds/crawlabledataset/s3/ThreddsS3ClientImplSpec.groovy +++ /dev/null @@ -1,133 +0,0 @@ -package thredds.crawlabledataset.s3 - -import com.amazonaws.AmazonServiceException -import com.amazonaws.services.s3.AmazonS3Client -import com.amazonaws.services.s3.model.ObjectListing -import com.amazonaws.services.s3.model.ObjectMetadata -import com.amazonaws.services.s3.model.S3ObjectSummary -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification - -/** - * Tests that ThreddsS3ClientImpl implements the contract of ThreddsS3Client, particularly with respect to unhappy - * code paths. Makes heavy use of mocking to avoid actually connecting to Amazon S3. - *

- * TODO: These unit tests are nice and fast, but we need integration tests too. For that, we'll need an AWS instance - * under our control that we can host test data on. - * - * @author cwardgar - * @since 2015/08/26 - */ -class ThreddsS3ClientImplSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(ThreddsS3ClientImplSpec) - - ObjectListing emptyMockObjectListing - ObjectListing nonEmptyMockObjectListing - AmazonServiceException amazonServiceException - - def setup() { - emptyMockObjectListing = Mock(ObjectListing) { - getObjectSummaries() >> [] - getCommonPrefixes() >> [] - } - - nonEmptyMockObjectListing = Mock(ObjectListing) { - getObjectSummaries() >> [Mock(S3ObjectSummary)] - getCommonPrefixes() >> ['fake'] - } - - // Create exception that stubbed methods will throw. - amazonServiceException = new AmazonServiceException("error") - amazonServiceException.setStatusCode(404) - } - - def "null key"() { - setup: "create mock to avoid actually connecting to S3" - AmazonS3Client amazonS3Client = Mock(AmazonS3Client) { - // This is the behavior of the actual AmazonS3Client for these 3 methods when key is null. - getObjectMetadata(*_) >> { throw new IllegalArgumentException("null key") } - listObjects(*_) >> nonEmptyMockObjectListing - getObject(*_) >> { throw new IllegalArgumentException("null key") } - } - - and: "create ThreddsS3Client that uses the mock AmazonS3Client" - ThreddsS3Client threddsS3Client = new ThreddsS3ClientImpl(amazonS3Client) - - and: "create URI with null key (not that it matters with the mocking we've done)" - S3URI s3uri = new S3URI("s3://imos-data") - - expect: "null key" - threddsS3Client.getObjectMetadata(s3uri) == null - threddsS3Client.listObjects(s3uri).commonPrefixes == ['fake'] // doesn't need a key. - threddsS3Client.saveObjectToFile(s3uri, new File("some file")) == null - } - - def "non-existent bucket"() { - setup: "create mock to avoid actually connecting to S3" - AmazonS3Client amazonS3Client = Mock(AmazonS3Client) { - // This is the behavior of the actual AmazonS3Client for these 3 methods when bucket is non-existent. - getObjectMetadata(*_) >> { throw amazonServiceException } - listObjects(*_) >> { throw amazonServiceException } - getObject(*_) >> { throw amazonServiceException } - } - - and: "create ThreddsS3Client that uses the mock AmazonS3Client" - ThreddsS3Client threddsS3Client = new ThreddsS3ClientImpl(amazonS3Client) - - and: "create URI with non-existent bucket (not that it matters with the mocking we've done)" - S3URI s3uri = new S3URI("s3://non-existent-bucket/blah") - - expect: "non-existent bucket" - threddsS3Client.getObjectMetadata(s3uri) == null - threddsS3Client.listObjects(s3uri) == null - threddsS3Client.saveObjectToFile(s3uri, new File("some file")) == null - } - - def "non-existent key"() { - setup: "create mock to avoid actually connecting to S3" - AmazonS3Client amazonS3Client = Mock(AmazonS3Client) { - // This is the behavior of the actual AmazonS3Client for these 3 methods when key is non-existent. 
- getObjectMetadata(*_) >> { throw amazonServiceException } - listObjects(*_) >> emptyMockObjectListing - getObject(*_) >> { throw amazonServiceException } - } - - and: "create ThreddsS3Client that uses the mock AmazonS3Client" - ThreddsS3Client threddsS3Client = new ThreddsS3ClientImpl(amazonS3Client) - - and: "create URI with non-existent key (not that it matters with the mocking we've done)" - S3URI s3uri = new S3URI("s3://imos-data/non-existent-key") - - expect: "non-existent key" - threddsS3Client.getObjectMetadata(s3uri) == null - threddsS3Client.listObjects(s3uri) == null - threddsS3Client.saveObjectToFile(s3uri, new File("some file")) == null - } - - def "existent bucket and key"() { - setup: "create mock ObjectMetadata" - ObjectMetadata mockObjectMetadata = Mock(ObjectMetadata) { - getContentType() >> 'fake' - } - - and: "create mock to avoid actually connecting to S3" - AmazonS3Client amazonS3Client = Mock(AmazonS3Client) { - // This is the behavior of the actual AmazonS3Client for these 3 methods when bucket and key exist. - getObjectMetadata(*_) >> mockObjectMetadata // Non-null ObjectMetadata - listObjects(*_) >> emptyMockObjectListing // Empty ObjectListing - getObject(*_) >> mockObjectMetadata // Non-null ObjectMetadata - } - - and: "create ThreddsS3Client that uses the mock AmazonS3Client" - ThreddsS3Client threddsS3Client = new ThreddsS3ClientImpl(amazonS3Client) - - and: "create URI with existent bucket and key (not that it matters with the mocking we've done)" - S3URI s3uri = new S3URI("s3://bucket/dataset.nc") - - expect: "existent bucket and key" - threddsS3Client.getObjectMetadata(s3uri).contentType == 'fake' - threddsS3Client.listObjects(s3uri) == null - threddsS3Client.saveObjectToFile(s3uri, s3uri.getTempFile()).name.endsWith('dataset.nc') - } -} diff --git a/netcdf-java-testing-platform/build.gradle b/netcdf-java-testing-platform/build.gradle index 3163b2a19c..5527e0abee 100644 --- a/netcdf-java-testing-platform/build.gradle +++ b/netcdf-java-testing-platform/build.gradle @@ -18,10 +18,6 @@ dependencies { // Fluent assertions for Java api 'com.google.truth:truth:1.0' - // These two are for Spock. - api 'org.spockframework:spock-core:1.3-groovy-2.5' - api 'org.codehaus.groovy:groovy-all:2.5.12' - //mockito api 'org.mockito:mockito-core:2.28.2' @@ -33,14 +29,6 @@ dependencies { // opendap, dap4, and httpservices api 'org.testcontainers:testcontainers:1.19.7' - - // In Spock, allows mocking of classes (in addition to interfaces). - // todo: remove with legacy in 6 - runtime 'cglib:cglib-nodep:3.2.4' - - // In Spock, allows mocking of classes without default constructor (together with CGLIB). - // todo: remove with legacy in 6 - runtime 'org.objenesis:objenesis:2.4' } } diff --git a/netcdf4/build.gradle b/netcdf4/build.gradle index 7f1cf95400..9038c9cf91 100644 --- a/netcdf4/build.gradle +++ b/netcdf4/build.gradle @@ -4,7 +4,6 @@ ext.url = 'https://www.unidata.ucar.edu/software/netcdf/' apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" -apply plugin: 'groovy' // For Spock tests. 
apply plugin: 'jacoco' dependencies { @@ -18,8 +17,6 @@ dependencies { testImplementation project(':cdm-test-utils') testImplementation 'com.google.truth:truth' - testImplementation 'org.codehaus.groovy:groovy-all' // for spock - testImplementation 'org.spockframework:spock-core' testRuntimeOnly 'ch.qos.logback:logback-classic' } @@ -31,7 +28,6 @@ dependencies { sourceSets { unloadedTest { - groovy.srcDir file('src/unloadedTest/groovy') resources.srcDir file('src/unloadedTest/resources') compileClasspath += sourceSets.main.output + configurations.testCompileClasspath runtimeClasspath += output + sourceSets.main.output + configurations.testRuntimeClasspath diff --git a/netcdf4/src/main/java/ucar/nc2/jni/netcdf/Nc4Iosp.java b/netcdf4/src/main/java/ucar/nc2/jni/netcdf/Nc4Iosp.java index 2bcb679edd..91d45f12e6 100755 --- a/netcdf4/src/main/java/ucar/nc2/jni/netcdf/Nc4Iosp.java +++ b/netcdf4/src/main/java/ucar/nc2/jni/netcdf/Nc4Iosp.java @@ -1131,7 +1131,7 @@ private String nc_inq_var_name(int grpid, int varno) throws IOException { } ////////////////////////////////////////////////////////////////////////// - private static class Vinfo { + static class Vinfo { final Group4 g4; int varid, typeid; UserType utype; // may be null @@ -1143,7 +1143,7 @@ private static class Vinfo { } } - private static class Group4 { + static class Group4 { final int grpid; final Group g; final Group4 parent; diff --git a/netcdf4/src/test/groovy/ucar/nc2/jni/netcdf/Nc4IospMiscSpec.groovy b/netcdf4/src/test/groovy/ucar/nc2/jni/netcdf/Nc4IospMiscSpec.groovy deleted file mode 100644 index 2cd3592be7..0000000000 --- a/netcdf4/src/test/groovy/ucar/nc2/jni/netcdf/Nc4IospMiscSpec.groovy +++ /dev/null @@ -1,172 +0,0 @@ -package ucar.nc2.jni.netcdf - -import org.junit.Rule -import org.junit.rules.TemporaryFolder -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification -import spock.lang.Unroll -import ucar.ma2.Array -import ucar.ma2.ArrayChar -import ucar.ma2.DataType -import ucar.nc2.Attribute -import ucar.nc2.Dimension -import ucar.nc2.NetcdfFile -import ucar.nc2.NetcdfFileWriter -import ucar.nc2.Variable - -/** - * Tests miscellaneous aspects of Nc4Iosp. - * - * @author cwardgar - * @since 2017-03-27 - */ -class Nc4IospMiscSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(Nc4IospMiscSpec) - - @Rule TemporaryFolder tempFolder = new TemporaryFolder() - - /* - * Demonstrates bug from https://www.unidata.ucar.edu/mailing_lists/archives/netcdf-java/2017/msg00012.html - * Prior to fix, this test would fail for 'u_short', 'u_int', and 'u_long' variables with - * "Unknown userType == 8", "Unknown userType == 9", and "Unknown userType == 11" errors respectively. - */ - @Unroll // Report iterations of method independently. 
- def "Nc4Iosp.readDataSection() can read '#varName' variables"() { - setup: "locate test file" - File file = new File(this.class.getResource("unsigned.nc4").toURI()) - assert file.exists() - - and: "open it as a NetcdfFile using Nc4Iosp" - NetcdfFile ncFile = NetcdfFile.open(file.absolutePath, Nc4Iosp.class.canonicalName, -1, null, null) - - and: "grab the Nc4Iosp instance within so that we can test Nc4Iosp.readDataSection()" - Nc4Iosp nc4Iosp = ncFile.iosp as Nc4Iosp - - when: "read all of var's data using readDataSection()" - Variable var = ncFile.findVariable(varName) - Nc4Iosp.Vinfo vinfo = var.SPobject as Nc4Iosp.Vinfo - Array array = nc4Iosp.readDataSection(vinfo.g4.grpid, vinfo.varid, vinfo.typeid, var.shapeAsSection); - - then: "actual data equals expected data" - array.storage == expectedData - - cleanup: "close NetcdfFile" - ncFile?.close() - - where: "data are too big for their type. Overflow expected because Java doesn't support unsigned types" - varName << [ "u_byte", "u_short", "u_int", "u_long" ] - expectedData << [ - [(1 << 7), (1 << 7) + 1, (1 << 7) + 2] as byte[], // Will overflow to [-128, -127, -126] - [(1 << 15), (1 << 15) + 1, (1 << 15) + 2] as short[], - [(1 << 31), (1 << 31) + 1, (1 << 31) + 2] as int[], - [(1L << 63), (1L << 63) + 1, (1L << 63) + 2] as long[] - ]; - } - - /* - * Demonstrates bug from - * https://andy.unidata.ucar.edu/esupport/staff/index.php?_m=tickets&_a=viewticket&ticketid=28098 - * Prior to fix, primary2Dim and primary3Dim were not being identified as unlimited. - */ - def "Nc4Iosp supports multiple groups, each containing an unlimited dimension"() { - setup: "locate test file" - File file = new File(this.class.getResource("DBP-690959.nc4").toURI()) - assert file.exists() - - and: "open it as a NetcdfFile using Nc4Iosp" - NetcdfFile ncFile = NetcdfFile.open(file.absolutePath, Nc4Iosp.class.canonicalName, -1, null, null) - - and: "find unlimited dimensions" - Dimension primary1Dim = ncFile.findDimension("/group1/primary") - Dimension primary2Dim = ncFile.findDimension("/group2/primary") - Dimension primary3Dim = ncFile.findDimension("/group3/primary") - - expect: "all dimensions are unlimited" - primary1Dim.isUnlimited() - primary2Dim.isUnlimited() - primary3Dim.isUnlimited() - - cleanup: "close NetcdfFile" - ncFile?.close() - } - - def "create NetCDF-4 file with unlimited dimension"() { - setup: "create temp file that will be deleted after test by TemporaryFolder @Rule" - File tempFile = new File(tempFolder.root, "Nc4IospMiscSpec.nc4") - - and: "open a NetcdfFileWriter that will write NetCDF-4 to tempFile" - NetcdfFileWriter ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf4, tempFile.absolutePath) - - and: "add an unlimited dimension and create the file on disk" - Dimension dimBefore = ncWriter.addDimension(null, "dim", 3, true, false) - ncWriter.create() - - and: "close the file for writing and reopen it for reading" - ncWriter.close() - NetcdfFile ncFile = NetcdfFile.open(tempFile.absolutePath) - - expect: "the dimension is the same after the write/read round-trip" - Dimension dimAfter = ncFile.findDimension(dimBefore.fullName) - // Failed prior to fix, because dimAfter was not unlimited. - dimBefore.equals dimAfter - - cleanup: "close writer and reader" - ncWriter?.close() // Under normal circumstances, this will already be closed. Luckily method is idempotent. 
- ncFile?.close() - } - - def "create NetCDF-4 file null valued attributes"() { - setup: "create temp file that will be deleted after test by TemporaryFolder @Rule" - File tempFile = new File(tempFolder.root, "Nc4IospMiscSpec.nc4") - - and: "open a NetcdfFileWriter that will write NetCDF-4 to tempFile" - NetcdfFileWriter ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf4, tempFile.absolutePath) - - and: "add a numerical valued attribute with a null value" - Attribute attrNum = new Attribute("nullvalnum", DataType.INT) - Attribute attrNumBefore = ncWriter.addGlobalAttribute(attrNum) - - and: "add a string valued attribute with a null value" - Attribute attrStr = new Attribute("nullvalstr", DataType.STRING) - Attribute attrStrBefore = ncWriter.addGlobalAttribute(attrStr) - - and: "add a character valued attribute with a null value" - Attribute attrChar = new Attribute("nullvalchar", DataType.CHAR) - Attribute attrCharBefore = ncWriter.addGlobalAttribute(attrChar) - - and: "add a character valued attribute with a specific null char value" - Attribute attrNullChar = new Attribute("nullcharvalchar", DataType.CHAR) - Array attrNullCharValue = ArrayChar.makeFromString("\0", 1); - attrNullChar.setValues(attrNullCharValue) - Attribute attrNullCharBefore = ncWriter.addGlobalAttribute(attrNullChar) - ncWriter.create() - - and: "close the file for writing and reopen it for reading" - ncWriter.close() - NetcdfFile ncFile = NetcdfFile.open(tempFile.absolutePath) - - expect: "the value of the attributes are null" - Attribute attrNumAfter = ncFile.findGlobalAttribute(attrNumBefore.fullName) - attrNumBefore.getValues().equals attrNumAfter.getValues() - attrNumBefore.getValues() == null - - Attribute attrStrAfter = ncFile.findGlobalAttribute(attrStrBefore.fullName) - attrStrBefore.getValues().equals attrStrAfter.getValues() - attrStrBefore.getValues() == null - - Attribute attrCharAfter = ncFile.findGlobalAttribute(attrCharBefore.fullName) - attrCharBefore.getValues().equals attrCharAfter.getValues() - attrCharBefore.getValues() == null - - Attribute attrNullCharAfter = ncFile.findGlobalAttribute(attrNullCharBefore.fullName) - attrNullCharBefore.getValues().getSize() == attrNullCharAfter.getValues().getSize() - attrNullCharBefore.getValues().getSize() == 1 - attrNullCharBefore.getValue(0).equals(attrNullCharAfter.getValue(0)) - attrNullCharBefore.equals(attrNullCharAfter) - - cleanup: "close writer and reader" - ncWriter?.close() // Under normal circumstances, this will already be closed. Luckily method is idempotent. 
-        ncFile?.close()
-    }
-}
diff --git a/netcdf4/src/test/java/ucar/nc2/jni/netcdf/TestNc4IospMisc.java b/netcdf4/src/test/java/ucar/nc2/jni/netcdf/TestNc4IospMisc.java
new file mode 100644
index 0000000000..757173b63d
--- /dev/null
+++ b/netcdf4/src/test/java/ucar/nc2/jni/netcdf/TestNc4IospMisc.java
@@ -0,0 +1,144 @@
+package ucar.nc2.jni.netcdf;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import ucar.ma2.Array;
+import ucar.ma2.ArrayChar;
+import ucar.ma2.DataType;
+import ucar.ma2.MAMath;
+import ucar.nc2.Attribute;
+import ucar.nc2.Dimension;
+import ucar.nc2.NetcdfFile;
+import ucar.nc2.NetcdfFileWriter;
+import ucar.nc2.Variable;
+
+public class TestNc4IospMisc {
+
+  @Rule
+  public final TemporaryFolder tempFolder = new TemporaryFolder();
+
+  /*
+   * Demonstrates bug from https://www.unidata.ucar.edu/mailing_lists/archives/netcdf-java/2017/msg00012.html
+   * Prior to fix, this test would fail for 'u_short', 'u_int', and 'u_long' variables with
+   * "Unknown userType == 8", "Unknown userType == 9", and "Unknown userType == 11" errors respectively.
+   */
+  @Test
+  public void shouldReadDataSectionOfVariables() throws Exception {
+    String[] varNames = new String[] {"u_byte", "u_short", "u_int", "u_long"};
+    List<Array> expectedData = new ArrayList<>();
+    // Will overflow to [-128, -127, -126]
+    expectedData
+        .add(Array.makeFromJavaArray(new byte[] {(byte) (1 << 7), (byte) ((1 << 7) + 1), (byte) ((1 << 7) + 2)}));
+    expectedData.add(
+        Array.makeFromJavaArray(new short[] {(short) (1 << 15), (short) ((1 << 15) + 1), (short) ((1 << 15) + 2)}));
+    expectedData.add(Array.makeFromJavaArray(new int[] {(1 << 31), (1 << 31) + 1, (1 << 31) + 2}));
+    expectedData.add(Array.makeFromJavaArray(new long[] {(1L << 63), (1L << 63) + 1, (1L << 63) + 2}));
+
+    File file = new File(getClass().getResource("unsigned.nc4").toURI());
+    assertThat(file.exists()).isTrue();
+
+    try (
+        NetcdfFile ncFile = NetcdfFile.open(file.getAbsolutePath(), Nc4Iosp.class.getCanonicalName(), -1, null, null)) {
+      Nc4Iosp nc4Iosp = (Nc4Iosp) ncFile.getIosp();
+
+      for (int i = 0; i < varNames.length; i++) {
+        Variable var = ncFile.findVariable(varNames[i]);
+        Nc4Iosp.Vinfo vinfo = (Nc4Iosp.Vinfo) var.getSPobject();
+        Array array = nc4Iosp.readDataSection(vinfo.g4.grpid, vinfo.varid, vinfo.typeid, var.getShapeAsSection());
+        assertThat(MAMath.nearlyEquals(array, expectedData.get(i))).isTrue();
+      }
+    }
+  }
+
+  /*
+   * Demonstrates bug from
+   * https://andy.unidata.ucar.edu/esupport/staff/index.php?_m=tickets&_a=viewticket&ticketid=28098
+   * Prior to fix, primary2Dim and primary3Dim were not being identified as unlimited.
+   */
+  @Test
+  public void shouldContainMultipleGroupsWithUnlimitedDimensions() throws Exception {
+    File file = new File(this.getClass().getResource("DBP-690959.nc4").toURI());
+    assertThat(file.exists()).isTrue();
+
+    try (
+        NetcdfFile ncFile = NetcdfFile.open(file.getAbsolutePath(), Nc4Iosp.class.getCanonicalName(), -1, null, null)) {
+      Dimension primary1Dim = ncFile.findDimension("/group1/primary");
+      Dimension primary2Dim = ncFile.findDimension("/group2/primary");
+      Dimension primary3Dim = ncFile.findDimension("/group3/primary");
+
+      assertThat(primary1Dim.isUnlimited()).isTrue();
+      assertThat(primary2Dim.isUnlimited()).isTrue();
+      assertThat(primary3Dim.isUnlimited()).isTrue();
+    }
+  }
+
+  @Test
+  public void shouldWriteUnlimitedDimension() throws Exception {
+    File tempFile = new File(tempFolder.getRoot(), "Nc4IospMiscSpec.nc4");
+
+    try (NetcdfFileWriter ncWriter =
+        NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf4, tempFile.getAbsolutePath())) {
+      Dimension dimBefore = ncWriter.addDimension(null, "dim", 3, true, false);
+      ncWriter.create();
+      ncWriter.close();
+
+      try (NetcdfFile ncFile = NetcdfFile.open(tempFile.getAbsolutePath())) {
+        Dimension dimAfter = ncFile.findDimension(dimBefore.getFullName());
+        // Failed prior to fix, because dimAfter was not unlimited.
+        assertThat(dimBefore).isEqualTo(dimAfter);
+      }
+    }
+  }
+
+  @Test
+  public void shouldCreateFileWithNullValuedAttributes() throws Exception {
+    File tempFile = new File(tempFolder.getRoot(), "Nc4IospMiscSpec.nc4");
+
+    try (NetcdfFileWriter ncWriter =
+        NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf4, tempFile.getAbsolutePath())) {
+      Attribute attrNum = new Attribute("nullvalnum", DataType.INT);
+      Attribute attrNumBefore = ncWriter.addGlobalAttribute(attrNum);
+
+      Attribute attrStr = new Attribute("nullvalstr", DataType.STRING);
+      Attribute attrStrBefore = ncWriter.addGlobalAttribute(attrStr);
+
+      Attribute attrChar = new Attribute("nullvalchar", DataType.CHAR);
+      Attribute attrCharBefore = ncWriter.addGlobalAttribute(attrChar);
+
+      // add a character valued attribute with a specific null char value
+      Attribute attrNullChar = new Attribute("nullcharvalchar", DataType.CHAR);
+      Array attrNullCharValue = ArrayChar.makeFromString("\0", 1);
+      attrNullChar.setValues(attrNullCharValue);
+      Attribute attrNullCharBefore = ncWriter.addGlobalAttribute(attrNullChar);
+
+      ncWriter.create();
+      ncWriter.close();
+
+      try (NetcdfFile ncFile = NetcdfFile.open(tempFile.getAbsolutePath())) {
+        Attribute attrNumAfter = ncFile.findGlobalAttribute(attrNumBefore.getFullName());
+        assertThat(attrNumBefore.getValues()).isEqualTo(attrNumAfter.getValues());
+        assertThat(attrNumBefore.getValues()).isNull();
+
+        Attribute attrStrAfter = ncFile.findGlobalAttribute(attrStrBefore.getFullName());
+        assertThat(attrStrAfter.getValues()).isEqualTo(attrStrBefore.getValues());
+        assertThat(attrStrAfter.getValues()).isNull();
+
+        Attribute attrCharAfter = ncFile.findGlobalAttribute(attrCharBefore.getFullName());
+        assertThat(attrCharAfter.getValues()).isEqualTo(attrCharBefore.getValues());
+        assertThat(attrCharAfter.getValues()).isNull();
+
+        Attribute attrNullCharAfter = ncFile.findGlobalAttribute(attrNullCharBefore.getFullName());
+        assertThat(attrNullCharAfter.getValues().getSize()).isEqualTo(attrNullCharBefore.getValues().getSize());
+        assertThat(attrNullCharAfter.getValues().getSize()).isEqualTo(1);
+        assertThat(attrNullCharAfter.getValue(0)).isEqualTo(attrNullCharBefore.getValue(0));
+
assertThat(attrNullCharAfter).isEqualTo(attrNullCharBefore); + } + } + } +} diff --git a/netcdf4/src/unloadedTest/groovy/ucar/nc2/jni/netcdf/UnloadedNc4IospSpec.groovy b/netcdf4/src/unloadedTest/groovy/ucar/nc2/jni/netcdf/UnloadedNc4IospSpec.groovy deleted file mode 100644 index 05e308a10f..0000000000 --- a/netcdf4/src/unloadedTest/groovy/ucar/nc2/jni/netcdf/UnloadedNc4IospSpec.groovy +++ /dev/null @@ -1,38 +0,0 @@ -package ucar.nc2.jni.netcdf - -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import spock.lang.Specification -import ucar.nc2.Attribute - -/** - * Test various aspects of Nc4Iosp when the C lib is NOT loaded. - * - * @author cwardgar - * @since 2016-12-27 - */ -class UnloadedNc4IospSpec extends Specification { - private static final Logger logger = LoggerFactory.getLogger(UnloadedNc4IospSpec) - - def "flush in define mode, without C lib loaded"() { - setup: - Nc4Iosp nc4Iosp = new Nc4Iosp() - - when: "flush while still in define mode" - nc4Iosp.flush() - - then: "no NullPointerException is thrown" - notThrown NullPointerException // Would fail before the bug fix in this commit. - } - - def "updateAttribute in define mode, without C lib loaded"() { - setup: - Nc4Iosp nc4Iosp = new Nc4Iosp() - - when: "updateAttribute while still in define mode" - nc4Iosp.updateAttribute(null, new Attribute("foo", "bar")) - - then: "no IOException is thrown" - notThrown NullPointerException // Would fail before the bug fix in this commit. - } -} diff --git a/netcdf4/src/unloadedTest/java/ucar/nc2/jni/netcdf/TestUnloadedNc4Iosp.java b/netcdf4/src/unloadedTest/java/ucar/nc2/jni/netcdf/TestUnloadedNc4Iosp.java new file mode 100644 index 0000000000..3db804c4c7 --- /dev/null +++ b/netcdf4/src/unloadedTest/java/ucar/nc2/jni/netcdf/TestUnloadedNc4Iosp.java @@ -0,0 +1,20 @@ +package ucar.nc2.jni.netcdf; + +import java.io.IOException; +import org.junit.Test; +import ucar.nc2.Attribute; + +public class TestUnloadedNc4Iosp { + + @Test + public void shouldFlushInDefineModeWithoutCLib() throws IOException { + Nc4Iosp nc4Iosp = new Nc4Iosp(); + nc4Iosp.flush(); + } + + @Test + public void shouldUpdateAttributeInDefineModeWithoutCLib() throws IOException { + Nc4Iosp nc4Iosp = new Nc4Iosp(); + nc4Iosp.updateAttribute(null, new Attribute("foo", "bar")); + } +} diff --git a/uibase/build.gradle b/uibase/build.gradle index d9d76a059b..3627dda52e 100644 --- a/uibase/build.gradle +++ b/uibase/build.gradle @@ -3,7 +3,6 @@ ext.title = 'UI base library' apply from: "$rootDir/gradle/any/dependencies.gradle" apply from: "$rootDir/gradle/any/java-library.gradle" -apply plugin: 'groovy' // For Spock tests. dependencies { compile enforcedPlatform(project(':netcdf-java-platform')) @@ -32,8 +31,6 @@ dependencies { testImplementation project(':cdm:cdm-core') testImplementation project(':cdm-test-utils') - testImplementation 'org.codehaus.groovy:groovy-all' // for spock - testImplementation 'org.spockframework:spock-core' testImplementation 'commons-io:commons-io' testImplementation 'org.mockito:mockito-core' testImplementation 'com.google.truth:truth'