From 22327e175983c5a5fdca013092bd301399eb478c Mon Sep 17 00:00:00 2001
From: Emile Sonneveld
Date: Thu, 31 Oct 2024 16:03:08 +0100
Subject: [PATCH] React to MR: Cleanup code. Use @TempDir. Prune imports.

https://github.com/Open-EO/openeo-geotrellis-extensions/issues/329
---
 .../openeo/geotrellis/geotiff/package.scala   | 117 ++++++++++--------
 .../geotrellis/geotiff/TileGridTest.scala     |  23 ++--
 .../geotiff/WriteRDDToGeotiffTest.scala       |  15 +--
 .../layers/FileLayerProviderTest.scala        |  57 ++++-----
 4 files changed, 103 insertions(+), 109 deletions(-)

diff --git a/openeo-geotrellis/src/main/scala/org/openeo/geotrellis/geotiff/package.scala b/openeo-geotrellis/src/main/scala/org/openeo/geotrellis/geotiff/package.scala
index b58124258..d08a85e20 100644
--- a/openeo-geotrellis/src/main/scala/org/openeo/geotrellis/geotiff/package.scala
+++ b/openeo-geotrellis/src/main/scala/org/openeo/geotrellis/geotiff/package.scala
@@ -104,6 +104,42 @@ package object geotiff {
     ret.map(t => (t._1, t._2, t._3)).asJava
   }
 
+  private val executorAttemptDirectoryPrefix = "executorAttemptDirectory"
+
+  private def createExecutorAttemptDirectory(parentDirectory: String): Path = {
+    createExecutorAttemptDirectory(Path.of(parentDirectory))
+  }
+
+  private def createExecutorAttemptDirectory(parentDirectory: Path): Path = {
+    // Multiple executors with the same task can run at the same time.
+    // Writing their output to the same path would create a race condition.
+    // Let's provide a unique directory for each executor:
+    val rand = new java.security.SecureRandom().nextLong()
+    val uniqueFolderName = executorAttemptDirectoryPrefix + java.lang.Long.toUnsignedString(rand)
+    val executorAttemptDirectory = Paths.get(parentDirectory + "/" + uniqueFolderName)
+    Files.createDirectories(executorAttemptDirectory)
+    executorAttemptDirectory
+  }
+
+  private def moveFromExecutorAttemptDirectory(parentDirectory: Path, absolutePath: String): Path = {
+    // Move output file to standard location. (On S3, a move is more of a copy and delete):
+    val relativePath = parentDirectory.relativize(Path.of(absolutePath)).toString
+    if (!relativePath.startsWith(executorAttemptDirectoryPrefix)) throw new Exception()
+    // Remove the executorAttemptDirectory part from the path:
+    val destinationPath = parentDirectory.resolve(relativePath.substring(relativePath.indexOf("/") + 1))
+    waitTillPathAvailable(Path.of(absolutePath))
+    Files.move(Path.of(absolutePath), destinationPath)
+    destinationPath
+  }
+
+  private def cleanUpExecutorAttemptDirectory(parentDirectory: Path): Unit = {
+    Files.list(parentDirectory).forEach { p =>
+      if (Files.isDirectory(p) && p.getFileName.toString.startsWith(executorAttemptDirectoryPrefix)) {
+        FileUtils.deleteDirectory(p.toFile)
+      }
+    }
+  }
+
   /**
    * Save temporal rdd, on the executors
    *
@@ -176,10 +212,8 @@ package object geotiff {
           val segmentCount = bandSegmentCount * tiffBands
 
           // Each executor writes to a unique folder to avoid conflicts:
-          val uniqueFolderName = "tmp" + java.lang.Long.toUnsignedString(new java.security.SecureRandom().nextLong())
-          val base = Paths.get(path + "/" + uniqueFolderName)
-          Files.createDirectories(base)
-          val thePath = base.resolve(filename).toString
+          val executorAttemptDirectory = createExecutorAttemptDirectory(path)
+          val thePath = executorAttemptDirectory.resolve(filename).toString
 
           // filter band tags that match bandIndices
           val fo = formatOptions.deepClone()
@@ -192,22 +226,14 @@ package object geotiff {
             tileLayout, compression, cellTypes.head, tiffBands, segmentCount, fo,
           )
           (correctedPath, timestamp, croppedExtent, bandIndices)
-      }.collect().map({
+      }.collect().map {
         case (absolutePath, timestamp, croppedExtent, bandIndices) =>
-          // Move output file to standard location. (On S3, a move is more a copy and delete):
-          val relativePath = Path.of(path).relativize(Path.of(absolutePath)).toString
-          val destinationPath = Path.of(path).resolve(relativePath.substring(relativePath.indexOf("/") + 1))
-          waitTillPathAvailable(Path.of(absolutePath))
-          Files.move(Path.of(absolutePath), destinationPath)
+          val destinationPath = moveFromExecutorAttemptDirectory(Path.of(path), absolutePath)
           (destinationPath.toString, timestamp, croppedExtent, bandIndices)
-      }).toList.asJava
+      }.toList.asJava
+
+    cleanUpExecutorAttemptDirectory(Path.of(path))
 
-    // Clean up failed tasks:
-    Files.list(Path.of(path)).forEach { p =>
-      if (Files.isDirectory(p) && p.getFileName.toString.startsWith("tmp")) {
-        FileUtils.deleteDirectory(p.toFile)
-      }
-    }
     res
   }
 
@@ -254,12 +280,10 @@ package object geotiff {
       }
     }
     val res = rdd_per_band.groupByKey().map { case ((name, bandIndex), tiles) =>
-      val uniqueFolderName = "tmp" + java.lang.Long.toUnsignedString(new java.security.SecureRandom().nextLong())
      val fixedPath =
        if (path.endsWith("out")) {
-          val base = path.substring(0, path.length - 3) + uniqueFolderName + "/"
-          Files.createDirectories(Path.of(base))
-          base + name
+          val executorAttemptDirectory = createExecutorAttemptDirectory(path.substring(0, path.length - 3))
+          executorAttemptDirectory + "/" + name
        }
        else {
          path
@@ -272,27 +296,22 @@ package object geotiff {
       (stitchAndWriteToTiff(tiles, fixedPath, layout, crs, extent, None, None, compression, Some(fo)),
         Collections.singletonList(bandIndex))
-    }.collect().map({
-      case (absolutePath, y) =>
+    }.collect().map {
+      case (absolutePath, bandIndices) =>
         if (path.endsWith("out")) {
-          // Move output file to standard location. (On S3, a move is more a copy and delete):
           val beforeOut = path.substring(0, path.length - "out".length)
-          val relativePath = Path.of(beforeOut).relativize(Path.of(absolutePath)).toString
-          val destinationPath = beforeOut + relativePath.substring(relativePath.indexOf("/") + 1)
-          waitTillPathAvailable(Path.of(absolutePath))
-          Files.move(Path.of(absolutePath), Path.of(destinationPath))
-          (destinationPath, y)
+          val destinationPath = moveFromExecutorAttemptDirectory(Path.of(beforeOut), absolutePath)
+          (destinationPath.toString, bandIndices)
         } else {
-          (absolutePath, y)
+          (absolutePath, bandIndices)
         }
-    }).toList.sortBy(_._1).asJava
-    // Clean up failed tasks:
-    val beforeOut = path.substring(0, path.length - "out".length)
-    Files.list(Path.of(beforeOut)).forEach { p =>
-      if (Files.isDirectory(p) && p.getFileName.toString.startsWith("tmp")) {
-        FileUtils.deleteDirectory(p.toFile)
-      }
+    }.toList.sortBy(_._1).asJava
+
+    if (path.endsWith("out")) {
+      val beforeOut = path.substring(0, path.length - "out".length)
+      cleanUpExecutorAttemptDirectory(Path.of(beforeOut))
     }
+
     res
   } else {
     val tmp = saveRDDGeneric(rdd, bandCount, path, zLevel, cropBounds, formatOptions).asScala
@@ -734,28 +753,18 @@ package object geotiff {
       }.groupByKey()
       .map { case ((tileId, extent), tiles) =>
         // Each executor writes to a unique folder to avoid conflicts:
-        val uniqueFolderName = "tmp" + java.lang.Long.toUnsignedString(new java.security.SecureRandom().nextLong())
-        val base = Paths.get(Path.of(path).getParent + "/" + uniqueFolderName)
-        Files.createDirectories(base)
-        val filePath = base + "/" + newFilePath(Path.of(path).getFileName.toString, tileId)
+        val executorAttemptDirectory = createExecutorAttemptDirectory(Path.of(path).getParent)
+        val filePath = executorAttemptDirectory + "/" + newFilePath(Path.of(path).getFileName.toString, tileId)
         (stitchAndWriteToTiff(tiles, filePath, layout, crs, extent, croppedExtent, cropDimensions, compression), extent)
-      }.collect().map({
+      }.collect().map {
         case (absolutePath, croppedExtent) =>
-          // Move output file to standard location. (On S3, a move is more a copy and delete):
-          val relativePath = Path.of(path).getParent.relativize(Path.of(absolutePath)).toString
-          val destinationPath = Path.of(path).getParent.resolve(relativePath.substring(relativePath.indexOf("/") + 1))
-          waitTillPathAvailable(Path.of(absolutePath))
-          Files.move(Path.of(absolutePath), destinationPath)
+          val destinationPath = moveFromExecutorAttemptDirectory(Path.of(path).getParent, absolutePath)
           (destinationPath.toString, croppedExtent)
-      }).toList.asJava
+      }.toList.asJava
+
+    cleanUpExecutorAttemptDirectory(Path.of(path).getParent)
 
-    // Clean up failed tasks:
-    Files.list(Path.of(path).getParent).forEach { p =>
-      if (Files.isDirectory(p) && p.getFileName.toString.startsWith("tmp")) {
-        FileUtils.deleteDirectory(p.toFile)
-      }
-    }
     res
   }
 
diff --git a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/TileGridTest.scala b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/TileGridTest.scala
index d1d3945bb..d7cdd3cc1 100644
--- a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/TileGridTest.scala
+++ b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/TileGridTest.scala
@@ -1,10 +1,5 @@
 package org.openeo.geotrellis.geotiff
 
-import better.files.File.apply
-
-import java.time.LocalTime.MIDNIGHT
-import java.time.ZoneOffset.UTC
-import java.time.{LocalDate, ZonedDateTime}
 import geotrellis.proj4.{CRS, LatLng}
 import geotrellis.raster.io.geotiff.compression.DeflateCompression
 import geotrellis.spark._
@@ -12,21 +7,25 @@ import geotrellis.spark.util.SparkUtils
 import geotrellis.vector.{Extent, ProjectedExtent}
 import org.apache.spark.SparkContext
 import org.apache.spark.storage.StorageLevel.DISK_ONLY
-import org.junit._
+import org.junit.jupiter.api.io.TempDir
+import org.junit.jupiter.api.{BeforeAll, Test}
+import org.junit.{AfterClass, Assert}
 import org.openeo.geotrellis.LayerFixtures.rgbLayerProvider
 import org.openeo.geotrellis.png.PngTest
 import org.openeo.geotrellis.tile_grid.TileGrid
 import org.openeo.geotrellis.{LayerFixtures, geotiff}
 
-import java.nio.file.{Files, Paths}
+import java.nio.file.Path
+import java.time.LocalTime.MIDNIGHT
+import java.time.ZoneOffset.UTC
 import java.time.format.DateTimeFormatter.ISO_ZONED_DATE_TIME
+import java.time.{LocalDate, ZonedDateTime}
 import scala.collection.JavaConverters._
-import scala.reflect.io.Directory
 
 object TileGridTest {
   private var sc: SparkContext = _
 
-  @BeforeClass
+  @BeforeAll
   def setupSpark(): Unit = {
     // originally geotrellis.spark.util.SparkUtils.createLocalSparkContext
     val conf = SparkUtils.createSparkConf
@@ -51,11 +50,7 @@ class TileGridTest {
   import TileGridTest._
 
   @Test
-  def testSaveStitchWithTileGrids(): Unit = {
-    val outDir = Paths.get("tmp/testSaveStitchWithTileGrids/")
-    new Directory(outDir.toFile).deepList().foreach(_.delete())
-    Files.createDirectories(outDir)
-
+  def testSaveStitchWithTileGrids(@TempDir outDir: Path): Unit = {
     val date = ZonedDateTime.of(LocalDate.of(2020, 4, 5), MIDNIGHT, UTC)
     val bbox = ProjectedExtent(Extent(1.95, 50.95, 2.05, 51.05), LatLng)
 
diff --git a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/WriteRDDToGeotiffTest.scala b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/WriteRDDToGeotiffTest.scala
index 947c2b7a2..f7c1c5ac0 100644
--- a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/WriteRDDToGeotiffTest.scala
+++ b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/geotiff/WriteRDDToGeotiffTest.scala
@@ -13,14 +13,15 @@ import geotrellis.vector._
 import geotrellis.vector.io.json.GeoJson
 import org.apache.spark.{SparkConf, SparkContext, SparkEnv}
 import org.junit.Assert._
-import org.junit._
+import org.junit.jupiter.api.io.TempDir
+import org.junit.jupiter.api.{BeforeAll, Test}
 import org.junit.rules.TemporaryFolder
+import org.junit.{AfterClass, Rule}
 import org.openeo.geotrellis.{LayerFixtures, OpenEOProcesses, ProjectedPolygons}
-import org.openeo.sparklisteners.GetInfoSparkListener
 import org.slf4j.{Logger, LoggerFactory}
 
 import java.nio.file.{Files, Path, Paths}
-import java.time.{LocalDate, LocalTime, ZoneOffset, ZonedDateTime}
+import java.time.{LocalTime, ZoneOffset, ZonedDateTime}
 import java.util
 import java.util.zip.Deflater._
 import scala.annotation.meta.getter
@@ -34,7 +35,7 @@ object WriteRDDToGeotiffTest{
 
   var sc: SparkContext = _
 
-  @BeforeClass
+  @BeforeAll
   def setupSpark() = {
     sc = {
       val conf = new SparkConf().setMaster("local[2]").setAppName(getClass.getSimpleName)
@@ -151,11 +152,7 @@ class WriteRDDToGeotiffTest {
   }
 
   @Test
-  def testWriteRDD_apply_neighborhood(): Unit ={
-    val outDir = Paths.get("tmp/testWriteRDD_apply_neighborhood/")
-    new Directory(outDir.toFile).deepList().foreach(_.delete())
-    Files.createDirectories(outDir)
-
+  def testWriteRDD_apply_neighborhood(@TempDir outDir: Path): Unit = {
     val layoutCols = 8
     val layoutRows = 4
 
diff --git a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/layers/FileLayerProviderTest.scala b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/layers/FileLayerProviderTest.scala
index 1ec2198f7..575b14dbe 100644
--- a/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/layers/FileLayerProviderTest.scala
+++ b/openeo-geotrellis/src/test/scala/org/openeo/geotrellis/layers/FileLayerProviderTest.scala
@@ -4,7 +4,6 @@ import cats.data.NonEmptyList
 import geotrellis.layer.{FloatingLayoutScheme, LayoutTileSource, SpaceTimeKey, SpatialKey, TileLayerMetadata}
 import geotrellis.proj4.{CRS, LatLng}
 import geotrellis.raster.gdal.{GDALIOException, GDALRasterSource}
-import geotrellis.raster.geotiff.GeoTiffRasterSource
 import geotrellis.raster.io.geotiff.GeoTiff
 import geotrellis.raster.resample.{Bilinear, CubicConvolution, ResampleMethod}
 import geotrellis.raster.summary.polygonal.Summary
@@ -19,27 +18,27 @@ import geotrellis.vector._
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
 import org.junit.jupiter.api.Assertions.{assertEquals, assertNotSame, assertSame, assertTrue}
-import org.junit.jupiter.api.{AfterAll, BeforeAll, Disabled, Test, Timeout}
+import org.junit.jupiter.api.io.TempDir
+import org.junit.jupiter.api._
 import org.junit.jupiter.params.ParameterizedTest
 import org.junit.jupiter.params.provider.ValueSource
 import org.openeo.geotrellis.TestImplicits._
 import org.openeo.geotrellis.file.PyramidFactory
-import org.openeo.geotrellis.layers.FileLayerProvider.rasterSourceRDD
 import org.openeo.geotrellis.geotiff._
+import org.openeo.geotrellis.layers.FileLayerProvider.rasterSourceRDD
 import org.openeo.geotrellis.netcdf.{NetCDFOptions, NetCDFRDDWriter}
-import org.openeo.geotrellis.{LayerFixtures, OpenEOProcesses, ProjectedPolygons}
+import org.openeo.geotrellis.{LayerFixtures, ProjectedPolygons}
 import org.openeo.geotrelliscommon.DatacubeSupport._
 import org.openeo.geotrelliscommon.{ConfigurableSpaceTimePartitioner, DataCubeParameters, DatacubeSupport, NoCloudFilterStrategy, SpaceTimeByMonthPartitioner, SparseSpaceTimePartitioner}
 import org.openeo.opensearch.OpenSearchResponses.{CreoFeatureCollection, FeatureCollection, Link}
 import org.openeo.opensearch.backends.CreodiasClient
 import org.openeo.opensearch.{OpenSearchClient, OpenSearchResponses}
 import org.openeo.sparklisteners.GetInfoSparkListener
-import org.slf4j.LoggerFactory
 import ucar.nc2.NetcdfFile
 import ucar.nc2.util.CompareNetcdf2
 
 import java.net.{URI, URL}
-import java.nio.file.{Files, Paths}
+import java.nio.file.{Files, Path, Paths}
 import java.time.ZoneOffset.UTC
 import java.time.{LocalDate, ZoneId, ZonedDateTime}
 import java.util
@@ -47,7 +46,6 @@ import java.util.Formatter
 import java.util.concurrent.TimeUnit
 import scala.collection.immutable
 import scala.io.Source
-import scala.reflect.io.Directory
 
 object FileLayerProviderTest {
   private var _sc: Option[SparkContext] = None
@@ -1079,8 +1077,7 @@ class FileLayerProviderTest extends RasterMatchers{
   }
 
   @Test
-  def testPixelValueOffsetNeededCorner(): Unit = {
-    Files.createDirectories(Paths.get("tmp/"))
+  def testPixelValueOffsetNeededCorner(@TempDir outDir: Path): Unit = {
     // This selection will go over a corner that has nodata pixels
     val layer = testPixelValueOffsetNeeded(
       "/org/openeo/geotrellis/testPixelValueOffsetNeededCorner.json",
@@ -1088,14 +1085,14 @@ class FileLayerProviderTest extends RasterMatchers{
       LocalDate.of(2023, 4, 5),
     )
     val cubeSpatial = layer.toSpatial()
-    cubeSpatial.writeGeoTiff("tmp/testPixelValueOffsetNeededCorner.tiff")
+    cubeSpatial.writeGeoTiff(f"$outDir/testPixelValueOffsetNeededCorner.tiff")
     val arr = cubeSpatial.collect().array
     assertTrue(isNoData(arr(1)._2.toArrayTile().band(0).get(162, 250)))
     assertEquals(172, arr(0)._2.toArrayTile().band(0).get(5, 5), 1)
   }
 
   @Test
-  def testPixelValueOffsetNeededDark(): Unit = {
+  def testPixelValueOffsetNeededDark(@TempDir outDir: Path): Unit = {
     // This will cover an area where pixels go under 0
     val layer = testPixelValueOffsetNeeded(
       "/org/openeo/geotrellis/testPixelValueOffsetNeededDark.json",
@@ -1103,7 +1100,7 @@ class FileLayerProviderTest extends RasterMatchers{
       LocalDate.of(2023, 1, 17),
     )
     val cubeSpatial = layer.toSpatial()
-    cubeSpatial.writeGeoTiff("tmp/testPixelValueOffsetNeededDark.tiff")
+    cubeSpatial.writeGeoTiff(f"$outDir/testPixelValueOffsetNeededDark.tiff")
     val band = cubeSpatial.collect().array(0)._2.toArrayTile().band(0)
 
     assertEquals(888, band.get(0, 0), 1)
@@ -1123,11 +1120,7 @@ class FileLayerProviderTest extends RasterMatchers{
 
 
   @Test
-  def testMissingS2(): Unit = {
-    val outDir = Paths.get("tmp/FileLayerProviderTest/")
-    new Directory(outDir.toFile).deleteRecursively()
-    Files.createDirectories(outDir)
-
+  def testMissingS2(@TempDir outDir: Path): Unit = {
     val from = ZonedDateTime.parse("2024-03-24T00:00:00Z")
 
     val extent = Extent(-162.2501, 70.1839, -161.2879, 70.3401)
@@ -1277,8 +1270,7 @@ class FileLayerProviderTest extends RasterMatchers{
   }
 
   @Test
-  def testSamplingLoadPerProduct():Unit = {
-
+  def testSamplingLoadPerProduct(@TempDir outDir: Path):Unit = {
     val srs32631 = "EPSG:32631"
     val projected_polygons_native_crs = ProjectedPolygons.fromExtent(Extent(703109 - 100, 5600100, 709000, 5610000 - 100), srs32631)
     val dataCubeParameters = new DataCubeParameters()
@@ -1290,16 +1282,16 @@ class FileLayerProviderTest extends RasterMatchers{
     val cube = LayerFixtures.sentinel2Cube(LocalDate.of(2023, 4, 5), projected_polygons_native_crs, "/org/openeo/geotrellis/testPixelValueOffsetNeededCorner.json",dataCubeParameters)
     val opts = new GTiffOptions
     opts.setFilenamePrefix("load_per_product")
-    saveRDDTemporal(cube,"./", formatOptions = opts)
+    saveRDDTemporal(cube,outDir.toString, formatOptions = opts)
 
     dataCubeParameters.loadPerProduct = false
     val cube_ref = LayerFixtures.sentinel2Cube(LocalDate.of(2023, 4, 5), projected_polygons_native_crs, "/org/openeo/geotrellis/testPixelValueOffsetNeededCorner.json",dataCubeParameters)
     opts.setFilenamePrefix("load_regular")
-    saveRDDTemporal(cube_ref,"./", formatOptions = opts)
+    saveRDDTemporal(cube_ref, outDir.toString, formatOptions = opts)
 
-    val reference = GeoTiff.readMultiband("./load_regular_2023-04-05Z.tif").raster
-    val actual = GeoTiff.readMultiband("./load_per_product_2023-04-05Z.tif").raster
+    val reference = GeoTiff.readMultiband(f"$outDir/load_regular_2023-04-05Z.tif").raster
+    val actual = GeoTiff.readMultiband(f"$outDir/load_per_product_2023-04-05Z.tif").raster
 
     assertRastersEqual(actual,reference)
 
@@ -1329,8 +1321,7 @@ class FileLayerProviderTest extends RasterMatchers{
   }
 
   @Test
-  def testMultibandCOGViaSTAC(): Unit = {
-    Files.createDirectories(Paths.get("tmp/"))
+  def testMultibandCOGViaSTAC(@TempDir outDir: Path): Unit = {
     val factory = LayerFixtures.STACCOGCollection()
     val extent = Extent(-162.2501, 70.1839, -161.2879, 70.3401)
 
@@ -1343,7 +1334,7 @@ class FileLayerProviderTest extends RasterMatchers{
     bands.add("temperature-mean")
     bands.add("precipitation-flux")
 
-    val outLocation = "tmp/testMultibandCOGViaSTAC.nc"
+    val outLocation = f"$outDir/testMultibandCOGViaSTAC.nc"
     val referenceFile = "https://artifactory.vgt.vito.be/artifactory/testdata-public/openeo/geotrellis_extrensions/testMultibandCOGViaSTAC.nc"
 
     writeToNetCDFAndCompare(projected_polygons_native_crs, dataCubeParameters, bands, factory, outLocation, referenceFile)
@@ -1352,8 +1343,7 @@ class FileLayerProviderTest extends RasterMatchers{
 
 
   @Test
-  def testMultibandCOGViaSTACResample(): Unit = {
-    Files.createDirectories(Paths.get("tmp/"))
+  def testMultibandCOGViaSTACResample(@TempDir outDir: Path): Unit = {
     val factory = LayerFixtures.STACCOGCollection(resolution = CellSize(10.0,10.0))
     val extent = Extent(-162.2501, 70.1839, -161.2879, 70.3401)
 
@@ -1368,12 +1358,13 @@ class FileLayerProviderTest extends RasterMatchers{
     bands.add("temperature-mean")
     bands.add("precipitation-flux")
 
-    writeToNetCDFAndCompare(projected_polygons_native_crs, dataCubeParameters, bands, factory, "tmp/testMultibandCOGViaSTACResampledCubic.nc", "https://artifactory.vgt.vito.be/artifactory/testdata-public/openeo/geotrellis_extrensions/testMultibandCOGViaSTACResampledCubic.nc")
+    val referenceFile = "https://artifactory.vgt.vito.be/artifactory/testdata-public/openeo/geotrellis_extrensions/testMultibandCOGViaSTACResampledCubic.nc"
+    writeToNetCDFAndCompare(projected_polygons_native_crs, dataCubeParameters, bands, factory,
+      f"$outDir/testMultibandCOGViaSTACResampledCubic.nc", referenceFile)
   }
 
   @Test
-  def testMultibandCOGViaSTACResampleReadOneBand(): Unit = {
-    Files.createDirectories(Paths.get("tmp/"))
+  def testMultibandCOGViaSTACResampleReadOneBand(@TempDir outDir: Path): Unit = {
     val factory = LayerFixtures.STACCOGCollection(resolution = CellSize(10.0,10.0),util.Arrays.asList("precipitation-flux"))
     val extent = Extent(-162.2501, 70.1839, -161.2879, 70.3401)
 
@@ -1385,7 +1376,9 @@ class FileLayerProviderTest extends RasterMatchers{
     val bands: util.ArrayList[String] = new util.ArrayList[String]()
     bands.add("precipitation-flux")
 
-    writeToNetCDFAndCompare(projected_polygons_native_crs, dataCubeParameters, bands, factory, "tmp/testSinglebandCOGViaSTACResampled.nc", "https://artifactory.vgt.vito.be/artifactory/testdata-public/openeo/geotrellis_extrensions/testSinglebandCOGViaSTACResampled.nc")
+    val referenceFile = "https://artifactory.vgt.vito.be/artifactory/testdata-public/openeo/geotrellis_extrensions/testSinglebandCOGViaSTACResampled.nc"
+    writeToNetCDFAndCompare(projected_polygons_native_crs, dataCubeParameters, bands, factory,
+      f"$outDir/testSinglebandCOGViaSTACResampled.nc", referenceFile)
   }
 
   private def datacubeParams(polygonsAOI: ProjectedPolygons, resampleMethod: ResampleMethod) = {