diff --git a/external/pdal_wrench/alg.hpp b/external/pdal_wrench/alg.hpp
index ef2837ade3cc..107557e85f2c 100644
--- a/external/pdal_wrench/alg.hpp
+++ b/external/pdal_wrench/alg.hpp
@@ -117,6 +117,7 @@ struct Translate : public Alg
     std::string transformCrs;
     std::string transformCoordOp;
     std::string outputFormat; // las / laz / copc
+    std::string transformMatrix; // 4x4 matrix as 16 space-separated values
 
     // args - initialized in addArgs()
     pdal::Arg* argOutput = nullptr;
@@ -333,3 +334,116 @@ struct ToVector : public Alg
     virtual void preparePipelines(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
     virtual void finalize(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
 };
+
+
+struct ClassifyGround : public Alg
+{
+    ClassifyGround() { isStreaming = false; }
+
+    // parameters from the user
+    std::string outputFile;
+    std::string outputFormat; // las / laz / copc
+
+    double cellSize = 1.0;
+    double scalar = 1.25;
+    double slope = 0.15;
+    double threshold = 0.5;
+    double windowSize = 18.0;
+
+    // args - initialized in addArgs()
+    pdal::Arg* argOutput = nullptr;
+    pdal::Arg* argOutputFormat = nullptr;
+    pdal::Arg* argCellSize = nullptr;
+
+    pdal::Arg* argScalar = nullptr;
+    pdal::Arg* argSlope = nullptr;
+    pdal::Arg* argThreshold = nullptr;
+    pdal::Arg* argWindowSize = nullptr;
+
+    std::vector<std::string> tileOutputFiles;
+
+    // impl
+    virtual void addArgs() override;
+    virtual bool checkArgs() override;
+    virtual void preparePipelines(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
+    virtual void finalize(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
+};
+
+
+struct FilterNoise: public Alg
+{
+
+    FilterNoise() { isStreaming = false; }
+
+    std::vector<std::string> tileOutputFiles;
+
+    // parameters from the user
+    std::string outputFile;
+    std::string outputFormat; // las / laz / copc
+    std::string algorithm = "statistical"; // "statistical" or "radius"
+    bool removeNoisePoints = false;
+
+    // radius params
+    double radiusMinK = 2;
+    double radiusRadius = 1.0;
+
+    // statistical params
+    int statisticalMeanK = 8;
+    double statisticalMultiplier = 2.0;
+
+    // args - initialized in addArgs()
+    pdal::Arg* argOutput = nullptr;
+    pdal::Arg* argOutputFormat = nullptr;
+    pdal::Arg* argAlgorithm = nullptr;
+    pdal::Arg* argRemoveNoisePoints = nullptr;
+    pdal::Arg* argRadiusMinK = nullptr;
+    pdal::Arg* argRadiusRadius = nullptr;
+    pdal::Arg* argStatisticalMeanK = nullptr;
+    pdal::Arg* argStatisticalMultiplier = nullptr;
+
+    // impl
+    virtual void addArgs() override;
+    virtual bool checkArgs() override;
+    virtual void preparePipelines(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
+    virtual void finalize(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
+};
+
+
+struct HeightAboveGround : public Alg
+{
+    HeightAboveGround() { isStreaming = false; }
+
+    // parameters from the user
+    std::string outputFile;
+    std::string outputFormat; // las / laz / copc / vpc
+    bool replaceZWithHeightAboveGround = true;
+    std::string algorithm = "nn";
+
+    // NN parameters
+    int nnCount = 1;
+    int nnMaxDistance = 0;
+
+    // Delaunay parameters
+    int delaunayCount = 10;
+
+    // args - initialized in addArgs()
+    pdal::Arg* argOutput = nullptr;
+    pdal::Arg* argOutputFormat = nullptr;
+    pdal::Arg* argReplaceZWithHeightAboveGround = nullptr;
+    pdal::Arg* argAlgorithm = nullptr;
+
+    // args - NN parameters
+    pdal::Arg* argNNCount = nullptr;
+    pdal::Arg* argNNMaxDistance = nullptr;
+
+    // args - Delaunay parameters
+    pdal::Arg* argDelaunayCount = nullptr;
+
+    std::vector<std::string> tileOutputFiles;
+
+    // impl
+    virtual void addArgs() override;
+    virtual bool checkArgs() override;
+    virtual void preparePipelines(std::vector<std::unique_ptr<PipelineManager>>& pipelines) override;
+ virtual void finalize(std::vector>& pipelines) override; +}; \ No newline at end of file diff --git a/external/pdal_wrench/classify_ground.cpp b/external/pdal_wrench/classify_ground.cpp new file mode 100644 index 000000000000..4baea9bc307a --- /dev/null +++ b/external/pdal_wrench/classify_ground.cpp @@ -0,0 +1,167 @@ +/***************************************************************************** + * Copyright (c) 2025, Lutra Consulting Ltd. and Hobu, Inc. * + * * + * All rights reserved. * + * * + * This program is free software; you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation; either version 3 of the License, or * + * (at your option) any later version. * + * * + ****************************************************************************/ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include + +#include "utils.hpp" +#include "alg.hpp" +#include "vpc.hpp" + +using namespace pdal; + +namespace fs = std::filesystem; + + +void ClassifyGround::addArgs() +{ + argOutput = &programArgs.add("output,o", "Output point cloud file", outputFile); + argOutputFormat = &programArgs.add("output-format", "Output format (las/laz/copc)", outputFormat); + + argCellSize = &programArgs.add("cell-size", "Sets the grid cell size in map units. Smaller values give finer detail but may increase noise.", cellSize, 1.0); + argScalar = &programArgs.add("scalar", "Increases the threshold on steeper slopes. Raise this for rough terrain.", scalar, 1.25); + argSlope = &programArgs.add("slope", "Controls how much terrain slope is tolerated as ground. Increase for steep terrain.", slope, 0.15); + argThreshold = &programArgs.add("threshold", " Elevation threshold for separating ground from objects. Higher values allow larger deviations from ground.", threshold, 0.5); + argWindowSize = &programArgs.add("window-size", "The maximum filter window size. Increase to better identify large buildings or objects, decrease to protect smaller features.", windowSize, 18.0); +} + +bool ClassifyGround::checkArgs() +{ + if (!argOutput->set()) + { + std::cerr << "missing output" << std::endl; + return false; + } + + if (argOutputFormat->set()) + { + if (outputFormat != "las" && outputFormat != "laz" && outputFormat != "copc") + { + std::cerr << "unknown output format: " << outputFormat << std::endl; + return false; + } + } + else + outputFormat = "las"; // uncompressed by default + + return true; +} + +static std::unique_ptr pipeline(ParallelJobInfo *tile, pdal::Options &filterOptions) +{ + std::unique_ptr manager( new PipelineManager ); + + Stage& r = makeReader(manager.get(), tile->inputFilenames[0]); + + Stage *last = &r; + + // filtering + if (!tile->filterBounds.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("bounds", tile->filterBounds)); + + if (readerSupportsBounds(r)) + { + // Reader of the format can do the filtering - use that whenever possible! 
+ r.addOptions(filter_opts); + } + else + { + // Reader can't do the filtering - do it with a filter + last = &manager->makeFilter( "filters.crop", *last, filter_opts); + } + } + + if (!tile->filterExpression.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("expression", tile->filterExpression)); + last = &manager->makeFilter( "filters.expression", *last, filter_opts); + } + + last = &manager->makeFilter( "filters.smrf", *last, filterOptions); + + makeWriter(manager.get(), tile->outputFilename, last); + + return manager; +} + + +void ClassifyGround::preparePipelines(std::vector>& pipelines) +{ + pdal::Options filterOptions; + filterOptions.add(pdal::Option("cell", cellSize)); + filterOptions.add(pdal::Option("scalar", scalar)); + filterOptions.add(pdal::Option("slope", slope)); + filterOptions.add(pdal::Option("threshold", threshold)); + filterOptions.add(pdal::Option("window", windowSize)); + + + if (ends_with(inputFile, ".vpc")) + { + // for /tmp/hello.vpc we will use /tmp/hello dir for all results + fs::path outputParentDir = fs::path(outputFile).parent_path(); + fs::path outputSubdir = outputParentDir / fs::path(outputFile).stem(); + fs::create_directories(outputSubdir); + + // VPC handling + VirtualPointCloud vpc; + if (!vpc.read(inputFile)) + return; + + for (const VirtualPointCloud::File& f : vpc.files) + { + ParallelJobInfo tile(ParallelJobInfo::FileBased, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(f.filename); + + // for input file /x/y/z.las that goes to /tmp/hello.vpc, + // individual output file will be called /tmp/hello/z.las + fs::path inputBasename = fileStem(f.filename); + + if (!ends_with(outputFile, ".vpc")) + tile.outputFilename = (outputSubdir / inputBasename).string() + ".las"; + else + tile.outputFilename = (outputSubdir / inputBasename).string() + "." 
+ outputFormat; + + tileOutputFiles.push_back(tile.outputFilename); + + pipelines.push_back(pipeline(&tile, filterOptions)); + } + } + else + { + ParallelJobInfo tile(ParallelJobInfo::Single, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(inputFile); + tile.outputFilename = outputFile; + + pipelines.push_back(pipeline(&tile, filterOptions)); + } +} + +void ClassifyGround::finalize(std::vector>&) +{ + if (tileOutputFiles.empty()) + return; + + buildOutput(outputFile, tileOutputFiles); +} \ No newline at end of file diff --git a/external/pdal_wrench/clip.cpp b/external/pdal_wrench/clip.cpp index 300f8841b75e..c7614c0ec6e6 100644 --- a/external/pdal_wrench/clip.cpp +++ b/external/pdal_wrench/clip.cpp @@ -116,7 +116,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, const pd std::unique_ptr manager( new PipelineManager ); - Stage& r = manager->makeReader( tile->inputFilenames[0], ""); + Stage& r = makeReader(manager.get(), tile->inputFilenames[0]); Stage *last = &r; @@ -146,9 +146,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, const pd last = &manager->makeFilter( "filters.crop", *last, crop_opts ); - pdal::Options writer_opts; - writer_opts.add(pdal::Option("forward", "all")); - manager->makeWriter( tile->outputFilename, "", *last, writer_opts); + makeWriter(manager.get(), tile->outputFilename, last); return manager; } @@ -222,29 +220,5 @@ void Clip::finalize(std::vector>&) if (tileOutputFiles.empty()) return; - // now build a new output VPC - std::vector args; - args.push_back("--output=" + outputFile); - for (std::string f : tileOutputFiles) - args.push_back(f); - - if (ends_with(outputFile, ".vpc")) - { - // now build a new output VPC - buildVpc(args); - } - else - { - // merge all the output files into a single file - Merge merge; - // for copc set isStreaming to false - if (ends_with(outputFile, ".copc.laz")) - { - merge.isStreaming = false; - } - runAlg(args, merge); - - // remove files as they are not needed anymore - they are merged - removeFiles(tileOutputFiles, true); - } + buildOutput(outputFile, tileOutputFiles); } diff --git a/external/pdal_wrench/filter_noise.cpp b/external/pdal_wrench/filter_noise.cpp new file mode 100644 index 000000000000..92c1493e8bb1 --- /dev/null +++ b/external/pdal_wrench/filter_noise.cpp @@ -0,0 +1,210 @@ +/***************************************************************************** + * Copyright (c) 2025, Lutra Consulting Ltd. and Hobu, Inc. * + * * + * All rights reserved. * + * * + * This program is free software; you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation; either version 3 of the License, or * + * (at your option) any later version. 
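
For context, the per-tile pipeline that classify_ground assembles above boils down to reader → (optional crop/expression) → filters.smrf → LAS/LAZ writer. A minimal standalone sketch of that chain with the plain PDAL C++ API follows; the file names are placeholders and the option values are the defaults registered in addArgs():

```cpp
#include <pdal/PipelineManager.hpp>
#include <pdal/Options.hpp>
#include <pdal/Stage.hpp>

int main()
{
    pdal::PipelineManager mgr;

    // reader (driver inferred from the file extension)
    pdal::Stage& reader = mgr.makeReader("input.laz", "");

    // SMRF ground classification - same option names as in preparePipelines()
    pdal::Options smrfOpts;
    smrfOpts.add(pdal::Option("cell", 1.0));
    smrfOpts.add(pdal::Option("scalar", 1.25));
    smrfOpts.add(pdal::Option("slope", 0.15));
    smrfOpts.add(pdal::Option("threshold", 0.5));
    smrfOpts.add(pdal::Option("window", 18.0));
    pdal::Stage& smrf = mgr.makeFilter("filters.smrf", reader, smrfOpts);

    // writer forwarding the original header fields
    pdal::Options writerOpts;
    writerOpts.add(pdal::Option("forward", "all"));
    mgr.makeWriter("classified.laz", "", smrf, writerOpts);

    mgr.execute();   // ground points end up with Classification == 2
    return 0;
}
```
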
* + * * + ****************************************************************************/ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include + +#include "utils.hpp" +#include "alg.hpp" +#include "vpc.hpp" + +using namespace pdal; + +namespace fs = std::filesystem; + +void FilterNoise::addArgs() +{ + argOutput = &programArgs.add("output,o", "Output point cloud file", outputFile); + argOutputFormat = &programArgs.add("output-format", "Output format (las/laz/copc)", outputFormat); + + argAlgorithm = &programArgs.add("algorithm", "Noise filtering algorithm to use: statistical or radius.", algorithm, "statistical"); + argRemoveNoisePoints = &programArgs.add("remove-noise-points", "Remove noise points from the output.", removeNoisePoints, false); + + // radius args + argRadiusMinK = &programArgs.add("radius-min-k", "Minimum number of neighbors in radius (radius algorithm only).", radiusMinK, 2.0); + argRadiusRadius = &programArgs.add("radius-radius", "Radius (radius method only).", radiusRadius, 1.0); + + // statistical args + argStatisticalMeanK = &programArgs.add("statistical-mean-k", "Mean number of neighbors (statistical method only)", statisticalMeanK, 8); + argStatisticalMultiplier = &programArgs.add("statistical-multiplier", "Standard deviation threshold (statistical method only).", statisticalMultiplier, 2.0); +} + +bool FilterNoise::checkArgs() +{ + if (!argOutput->set()) + { + std::cerr << "missing output" << std::endl; + return false; + } + + if (argOutputFormat->set()) + { + if (outputFormat != "las" && outputFormat != "laz" && outputFormat != "copc") + { + std::cerr << "unknown output format: " << outputFormat << std::endl; + return false; + } + } + else + outputFormat = "las"; // uncompressed by default + + if (!argAlgorithm->set()) + { + std::cerr << "missing algorithm" << std::endl; + return false; + } + else + { + if (!(algorithm == "statistical" || algorithm == "radius")) + { + std::cerr << "unknown algorithm: " << algorithm << std::endl; + return false; + } + } + + if (algorithm == "radius" && (argStatisticalMeanK->set() || argStatisticalMultiplier->set())) + { + std::cerr << "statistical- arguments are not supported with radius algorithm" << std::endl; + return false; + } + + if (algorithm == "statistical" && (argRadiusMinK->set() || argRadiusRadius->set())) + { + std::cerr << "radius- arguments are not supported with statistical algorithm" << std::endl; + return false; + } + + return true; +} + + +static std::unique_ptr pipeline(ParallelJobInfo *tile, pdal::Options &noiseFilterOptions, bool removeNoisePoints) +{ + std::unique_ptr manager( new PipelineManager ); + + Stage& r = makeReader(manager.get(), tile->inputFilenames[0]); + + Stage *last = &r; + + // filtering + if (!tile->filterBounds.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("bounds", tile->filterBounds)); + + if (readerSupportsBounds(r)) + { + // Reader of the format can do the filtering - use that whenever possible! 
+ r.addOptions(filter_opts); + } + else + { + // Reader can't do the filtering - do it with a filter + last = &manager->makeFilter("filters.crop", *last, filter_opts); + } + } + + if (!tile->filterExpression.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("expression", tile->filterExpression)); + last = &manager->makeFilter("filters.expression", *last, filter_opts); + } + + last = &manager->makeFilter("filters.outlier", *last, noiseFilterOptions); + + if (removeNoisePoints) + { + Options filter_opts; + filter_opts.add(pdal::Option("expression", "Classification != 7")); + last = &manager->makeFilter( "filters.expression", *last, filter_opts); + } + + makeWriter(manager.get(), tile->outputFilename, last); + + return manager; +} + +void FilterNoise::preparePipelines(std::vector>& pipelines) +{ + pdal::Options noiseFilterOptions; + noiseFilterOptions.add(pdal::Option("method", algorithm)); + + if (algorithm == "radius") + { + noiseFilterOptions.add(pdal::Option("min_k", radiusMinK)); + noiseFilterOptions.add(pdal::Option("radius", radiusRadius)); + } + else if (algorithm == "statistical") + { + noiseFilterOptions.add(pdal::Option("mean_k", statisticalMeanK)); + noiseFilterOptions.add(pdal::Option("multiplier", statisticalMultiplier)); + } + + if (ends_with(inputFile, ".vpc")) + { + // for /tmp/hello.vpc we will use /tmp/hello dir for all results + fs::path outputParentDir = fs::path(outputFile).parent_path(); + fs::path outputSubdir = outputParentDir / fs::path(outputFile).stem(); + fs::create_directories(outputSubdir); + + // VPC handling + VirtualPointCloud vpc; + if (!vpc.read(inputFile)) + return; + + for (const VirtualPointCloud::File& f : vpc.files) + { + ParallelJobInfo tile(ParallelJobInfo::FileBased, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(f.filename); + + // for input file /x/y/z.las that goes to /tmp/hello.vpc, + // individual output file will be called /tmp/hello/z.las + fs::path inputBasename = fileStem(f.filename); + + if (!ends_with(outputFile, ".vpc")) + tile.outputFilename = (outputSubdir / inputBasename).string() + ".las"; + else + tile.outputFilename = (outputSubdir / inputBasename).string() + "." + outputFormat; + + tileOutputFiles.push_back(tile.outputFilename); + + pipelines.push_back(pipeline(&tile, noiseFilterOptions, removeNoisePoints)); + } + } + else + { + ParallelJobInfo tile(ParallelJobInfo::Single, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(inputFile); + tile.outputFilename = outputFile; + + pipelines.push_back(pipeline(&tile, noiseFilterOptions, removeNoisePoints)); + } +} + +void FilterNoise::finalize(std::vector>&) +{ + if (tileOutputFiles.empty()) + return; + + buildOutput(outputFile, tileOutputFiles); +} \ No newline at end of file diff --git a/external/pdal_wrench/height_above_ground.cpp b/external/pdal_wrench/height_above_ground.cpp new file mode 100644 index 000000000000..893561000150 --- /dev/null +++ b/external/pdal_wrench/height_above_ground.cpp @@ -0,0 +1,225 @@ +/***************************************************************************** + * Copyright (c) 2025, Lutra Consulting Ltd. and Hobu, Inc. * + * * + * All rights reserved. * + * * + * This program is free software; you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation; either version 3 of the License, or * + * (at your option) any later version. 
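
Similarly, the noise filtering above is a thin wrapper around PDAL's filters.outlier, optionally followed by filters.expression to drop the points it marks as class 7. A minimal sketch using the statistical defaults from addArgs() (file names are placeholders):

```cpp
#include <pdal/PipelineManager.hpp>
#include <pdal/Options.hpp>
#include <pdal/Stage.hpp>

int main()
{
    pdal::PipelineManager mgr;
    pdal::Stage& reader = mgr.makeReader("input.laz", "");

    // statistical outlier detection - flags outliers with Classification = 7
    pdal::Options outlierOpts;
    outlierOpts.add(pdal::Option("method", "statistical"));
    outlierOpts.add(pdal::Option("mean_k", 8));
    outlierOpts.add(pdal::Option("multiplier", 2.0));
    pdal::Stage& outlier = mgr.makeFilter("filters.outlier", reader, outlierOpts);

    // optionally drop the flagged points instead of only classifying them
    pdal::Options dropOpts;
    dropOpts.add(pdal::Option("expression", "Classification != 7"));
    pdal::Stage& cleaned = mgr.makeFilter("filters.expression", outlier, dropOpts);

    pdal::Options writerOpts;
    writerOpts.add(pdal::Option("forward", "all"));
    mgr.makeWriter("denoised.laz", "", cleaned, writerOpts);

    mgr.execute();
    return 0;
}
```
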
* + * * + ****************************************************************************/ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include + +#include "utils.hpp" +#include "alg.hpp" +#include "vpc.hpp" + +using namespace pdal; + +namespace fs = std::filesystem; + + +void HeightAboveGround::addArgs() +{ + argOutput = &programArgs.add("output,o", "Output point cloud file", outputFile); + argOutputFormat = &programArgs.add("output-format", "Output format (las/laz/copc)", outputFormat); + argAlgorithm = &programArgs.add("algorithm", "Height Above Ground algorithm to use: nn (Nearest Neighbor) or delaunay (Delaunay).", algorithm, "nn"); + argReplaceZWithHeightAboveGround = &programArgs.add("replace-z", "Replace Z dimension with height above ground (true/false).", replaceZWithHeightAboveGround, true); + + // args - NN + argNNCount = &programArgs.add("nn-count", "The number of ground neighbors to consider when determining the height above ground for a non-ground point", nnCount, 1); + argNNMaxDistance = &programArgs.add("nn-max-distance", "Use only ground points within max_distance of non-ground point when performing neighbor interpolation.", nnMaxDistance, 0); + + // args - Delaunay + argDelaunayCount = &programArgs.add("delaunay-count", "The number of ground neighbors to consider when determining the height above ground for a non-ground point.", delaunayCount, 10); +} + +bool HeightAboveGround::checkArgs() +{ + if (!argOutput->set()) + { + std::cerr << "missing output" << std::endl; + return false; + } + + if (argOutputFormat->set()) + { + if (outputFormat != "las" && outputFormat != "laz" && outputFormat != "copc") + { + std::cerr << "unknown output format: " << outputFormat << std::endl; + return false; + } + } + else + outputFormat = "las"; // uncompressed by default + + if (!argAlgorithm->set()) + { + std::cerr << "missing algorithm" << std::endl; + return false; + } + else + { + if (!(algorithm == "nn" || algorithm == "delaunay")) + { + std::cerr << "unknown algorithm: " << algorithm << std::endl; + return false; + } + } + + if (algorithm == "delaunay" && (argNNMaxDistance->set() || argNNCount->set())) + { + std::cout << "nn-* arguments are not supported with delaunay algorithm" << std::endl; + } + + if (algorithm == "nn" && (argDelaunayCount->set())) + { + std::cout << "delaunay-count argument is not supported with nn algorithm" << std::endl; + } + + return true; +} + + +static std::unique_ptr pipeline(ParallelJobInfo *tile, std::string algorithm, bool replaceZWithHeightAboveGround, int nnCount, double nnMaxDistance, int delaunayCount) +{ + std::unique_ptr manager( new PipelineManager ); + + Options reader_opts; + + Stage& r = makeReader( manager.get(), tile->inputFilenames[0], reader_opts ); + + Stage *last = &r; + + // filtering + if (!tile->filterBounds.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("bounds", tile->filterBounds)); + + if (readerSupportsBounds(r)) + { + // Reader of the format can do the filtering - use that whenever possible! 
+ r.addOptions(filter_opts); + } + else + { + // Reader can't do the filtering - do it with a filter + last = &manager->makeFilter( "filters.crop", *last, filter_opts); + } + } + + if (!tile->filterExpression.empty()) + { + Options filter_opts; + filter_opts.add(pdal::Option("expression", tile->filterExpression)); + last = &manager->makeFilter( "filters.expression", *last, filter_opts); + } + + // NN HAG filter + if (algorithm == "nn") + { + Options hag_nn_opts; + + if (nnCount > 1) + { + hag_nn_opts.add(pdal::Option("count", nnCount)); + } + + if (nnMaxDistance > 0) + { + hag_nn_opts.add(pdal::Option("max_distance", nnMaxDistance)); + } + + last = &manager->makeFilter( "filters.hag_nn", *last, hag_nn_opts); + } + + // Delaunay HAG filter + if (algorithm == "delaunay") + { + Options hag_delaunay_opts; + + if (delaunayCount > 0) + { + hag_delaunay_opts.add(pdal::Option("count", delaunayCount)); + } + + last = &manager->makeFilter( "filters.hag_delaunay", *last, hag_delaunay_opts); + } + + if (replaceZWithHeightAboveGround) + { + pdal::Options ferry_opts; + ferry_opts.add(pdal::Option("dimensions", "HeightAboveGround=>Z")); + + last = &manager->makeFilter( "filters.ferry", *last, ferry_opts); + } + + makeWriter( manager.get(), tile->outputFilename, last); + + return manager; +} + + +void HeightAboveGround::preparePipelines(std::vector>& pipelines) +{ + if (ends_with(inputFile, ".vpc")) + { + // for /tmp/hello.vpc we will use /tmp/hello dir for all results + fs::path outputParentDir = fs::path(outputFile).parent_path(); + fs::path outputSubdir = outputParentDir / fs::path(outputFile).stem(); + fs::create_directories(outputSubdir); + + // VPC handling + VirtualPointCloud vpc; + if (!vpc.read(inputFile)) + return; + + for (const VirtualPointCloud::File& f : vpc.files) + { + ParallelJobInfo tile(ParallelJobInfo::FileBased, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(f.filename); + + // for input file /x/y/z.las that goes to /tmp/hello.vpc, + // individual output file will be called /tmp/hello/z.las + fs::path inputBasename = fileStem(f.filename); + + if (!ends_with(outputFile, ".vpc")) + tile.outputFilename = (outputSubdir / inputBasename).string() + ".las"; + else + tile.outputFilename = (outputSubdir / inputBasename).string() + "." 
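
The height-above-ground chain above is filters.hag_nn (or filters.hag_delaunay) followed by filters.ferry when Z is to be replaced. A standalone sketch is below; it assumes the input already has ground points classified (class 2), and the neighbour count and maximum distance are example values rather than the command's defaults:

```cpp
#include <pdal/PipelineManager.hpp>
#include <pdal/Options.hpp>
#include <pdal/Stage.hpp>

int main()
{
    pdal::PipelineManager mgr;
    pdal::Stage& reader = mgr.makeReader("classified.laz", "");  // needs ground points (class 2)

    // nearest-neighbour height above ground
    pdal::Options hagOpts;
    hagOpts.add(pdal::Option("count", 3));            // example: use 3 ground neighbours
    hagOpts.add(pdal::Option("max_distance", 50.0));  // example: ignore ground points beyond 50 units
    pdal::Stage& hag = mgr.makeFilter("filters.hag_nn", reader, hagOpts);

    // overwrite Z with the computed HeightAboveGround dimension
    pdal::Options ferryOpts;
    ferryOpts.add(pdal::Option("dimensions", "HeightAboveGround=>Z"));
    pdal::Stage& ferry = mgr.makeFilter("filters.ferry", hag, ferryOpts);

    pdal::Options writerOpts;
    writerOpts.add(pdal::Option("forward", "all"));
    writerOpts.add(pdal::Option("extra_dims", "all"));  // also writes the HeightAboveGround dimension itself
    mgr.makeWriter("hag.laz", "", ferry, writerOpts);

    mgr.execute();
    return 0;
}
```
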
+ outputFormat; + + tileOutputFiles.push_back(tile.outputFilename); + + pipelines.push_back(pipeline(&tile, algorithm, replaceZWithHeightAboveGround, nnCount, nnMaxDistance, delaunayCount)); + } + } + else + { + ParallelJobInfo tile(ParallelJobInfo::Single, BOX2D(), filterExpression, filterBounds); + tile.inputFilenames.push_back(inputFile); + tile.outputFilename = outputFile; + pipelines.push_back(pipeline(&tile, algorithm, replaceZWithHeightAboveGround, nnCount, nnMaxDistance, delaunayCount)); + } +} + +void HeightAboveGround::finalize(std::vector>&) +{ + if (tileOutputFiles.empty()) + return; + + buildOutput(outputFile, tileOutputFiles); +} diff --git a/external/pdal_wrench/main.cpp b/external/pdal_wrench/main.cpp index f4f2ff441650..851ca12ba081 100644 --- a/external/pdal_wrench/main.cpp +++ b/external/pdal_wrench/main.cpp @@ -20,31 +20,39 @@ #include #include +#include + +#include "gdal_version.h" #include "alg.hpp" #include "vpc.hpp" extern int runTile(std::vector arglist); // tile/tile.cpp +std::string WRENCH_VERSION = "1.3.0"; void printUsage() { std::cout << "usage: pdal_wrench []" << std::endl; std::cout << " pdal_wrench [--help]" << std::endl; + std::cout << " pdal_wrench [--version]" << std::endl; std::cout << std::endl; std::cout << "Available commands:" << std::endl; - std::cout << " boundary Exports a polygon file containing boundary" << std::endl; - std::cout << " build_vpc Creates a virtual point cloud" << std::endl; - std::cout << " clip Outputs only points that are inside of the clipping polygons" << std::endl; - std::cout << " density Exports a raster where each cell contains number of points" << std::endl; - std::cout << " info Prints basic metadata from the point cloud file" << std::endl; - std::cout << " merge Merges multiple point cloud files to a single one" << std::endl; - std::cout << " thin Creates a thinned version of the point cloud (with fewer points)" << std::endl; - std::cout << " tile Creates square tiles from input data" << std::endl; - std::cout << " to_raster Exports point cloud data to a 2D raster grid" << std::endl; - std::cout << " to_raster_tin Exports point cloud data to a 2D raster grid using triangulation" << std::endl; - std::cout << " to_vector Exports point cloud data to a vector layer with 3D points" << std::endl; - std::cout << " translate Converts to a different file format, reproject, and more" << std::endl; + std::cout << " boundary Exports a polygon file containing boundary" << std::endl; + std::cout << " build_vpc Creates a virtual point cloud" << std::endl; + std::cout << " classify_ground Classify ground points" << std::endl; + std::cout << " clip Outputs only points that are inside of the clipping polygons" << std::endl; + std::cout << " density Exports a raster where each cell contains number of points" << std::endl; + std::cout << " filter_noise Classify noise points" << std::endl; + std::cout << " height_above_ground Calculates height above ground for each point" << std::endl; + std::cout << " info Prints basic metadata from the point cloud file" << std::endl; + std::cout << " merge Merges multiple point cloud files to a single one" << std::endl; + std::cout << " thin Creates a thinned version of the point cloud (with fewer points)" << std::endl; + std::cout << " tile Creates square tiles from input data" << std::endl; + std::cout << " to_raster Exports point cloud data to a 2D raster grid" << std::endl; + std::cout << " to_raster_tin Exports point cloud data to a 2D raster grid using triangulation" << std::endl; + std::cout << " 
to_vector Exports point cloud data to a vector layer with 3D points" << std::endl; + std::cout << " translate Converts to a different file format, reproject, and more" << std::endl; } @@ -69,6 +77,10 @@ int main(int argc, char* argv[]) { printUsage(); } + else if (cmd == "--version" || cmd == "version") + { + std::cout << "pdal_wrench version: " << WRENCH_VERSION << " (PDAL version: " << pdal::Config::versionString() << ", GDAL version: "<< GDAL_RELEASE_NAME << ")" << std::endl; + } else if (cmd == "density") { Density density; @@ -123,10 +135,25 @@ int main(int argc, char* argv[]) Translate translate; runAlg(args, translate); } + else if (cmd == "height_above_ground") + { + HeightAboveGround heightAboveGround; + runAlg(args, heightAboveGround); + } else if (cmd == "tile") { runTile(args); } + else if (cmd == "filter_noise") + { + FilterNoise filterNoise; + runAlg(args, filterNoise); + } + else if (cmd == "classify_ground") + { + ClassifyGround classifyGround; + runAlg(args, classifyGround); + } else { std::cerr << "unknown command: " << cmd << std::endl; diff --git a/external/pdal_wrench/merge.cpp b/external/pdal_wrench/merge.cpp index 64e4e58be76d..b4aa03300048 100644 --- a/external/pdal_wrench/merge.cpp +++ b/external/pdal_wrench/merge.cpp @@ -61,7 +61,8 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile) std::vector readers; for (const std::string& f : tile->inputFilenames) { - readers.push_back(&manager->makeReader(f, "")); + Stage& reader = makeReader( manager.get(), f ); + readers.push_back(&reader); } std::vector last = readers; @@ -111,9 +112,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile) last.push_back(merge); } - pdal::Options options; - options.add(pdal::Option("forward", "all")); - Stage* writer = &manager->makeWriter(tile->outputFilename, "", options); + Stage* writer = &makeWriter(manager.get(), tile->outputFilename, nullptr); for (Stage *s : last) writer->setInput(*s); @@ -123,6 +122,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile) void Merge::preparePipelines(std::vector>& pipelines) { ParallelJobInfo tile(ParallelJobInfo::Single, BOX2D(), filterExpression, filterBounds); + std::vector inputFilesToProcess; if (!inputFileList.empty()) { std::ifstream inputFile(inputFileList); @@ -135,36 +135,37 @@ void Merge::preparePipelines(std::vector>& pipe while (std::getline(inputFile, line)) { - inputFiles.push_back(line); + inputFilesToProcess.push_back(line); } } - std::vector vpcFilesToRemove; - vpcFilesToRemove.reserve(inputFiles.size()); + inputFiles.reserve(inputFilesToProcess.size()); - for (const std::string& inputFile : inputFiles) - { + std::function processInputFile; + processInputFile = [&processInputFile,this](const std::string& inputFile) { if (ends_with(inputFile, ".vpc")) - { - vpcFilesToRemove.push_back(inputFile); - + { VirtualPointCloud vpc; if (!vpc.read(inputFile)) { std::cerr << "could not open input VPC: " << inputFile << std::endl; return; } - + for (const VirtualPointCloud::File& vpcSingleFile : vpc.files) { - inputFiles.push_back(vpcSingleFile.filename); + processInputFile(vpcSingleFile.filename); } } - } + else + { + inputFiles.push_back(inputFile); + } + }; - for (const std::string& f : vpcFilesToRemove) + for (const std::string& inputFile : inputFilesToProcess) { - inputFiles.erase(std::remove(inputFiles.begin(), inputFiles.end(), f), inputFiles.end()); + processInputFile(inputFile); } tile.inputFilenames = inputFiles; diff --git a/external/pdal_wrench/thin.cpp b/external/pdal_wrench/thin.cpp index 2b5a3644fa28..c8a5b1aebbd2 
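
The processInputFile recursion in merge.cpp above expands nested VPC references; its stripped declaration is presumably std::function<void(const std::string&)>. A self-contained illustration of the pattern, with a dummy readVpcEntries() standing in for VirtualPointCloud::read() plus iterating vpc.files:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// stand-in for VirtualPointCloud::read() + iterating vpc.files
static std::vector<std::string> readVpcEntries(const std::string&)
{
    return { "tile_1.laz", "tile_2.laz" };
}

int main()
{
    std::vector<std::string> inputFiles;

    std::function<void(const std::string&)> processInputFile;
    processInputFile = [&processInputFile, &inputFiles](const std::string& f) {
        if (f.size() > 4 && f.substr(f.size() - 4) == ".vpc")
        {
            // a VPC may reference another VPC, so recurse into each entry
            for (const std::string& child : readVpcEntries(f))
                processInputFile(child);
        }
        else
        {
            inputFiles.push_back(f);
        }
    };

    processInputFile("collection.vpc");
    for (const std::string& f : inputFiles)
        std::cout << f << "\n";
    return 0;
}
```
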
100644 --- a/external/pdal_wrench/thin.cpp +++ b/external/pdal_wrench/thin.cpp @@ -97,7 +97,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, std::str { std::unique_ptr manager( new PipelineManager ); - Stage& r = manager->makeReader( tile->inputFilenames[0], ""); + Stage& r = makeReader(manager.get(), tile->inputFilenames[0]); Stage *last = &r; @@ -138,9 +138,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, std::str last = &manager->makeFilter( "filters.sample", *last, sample_opts ); } - pdal::Options writer_opts; - writer_opts.add(pdal::Option("forward", "all")); // TODO: maybe we could use lower scale than the original - manager->makeWriter( tile->outputFilename, "", *last, writer_opts); + makeWriter(manager.get(), tile->outputFilename, last); return manager; } diff --git a/external/pdal_wrench/tile/FileDimInfo.hpp b/external/pdal_wrench/tile/FileDimInfo.hpp index d7f19933f997..4a6ba0efdca2 100644 --- a/external/pdal_wrench/tile/FileDimInfo.hpp +++ b/external/pdal_wrench/tile/FileDimInfo.hpp @@ -22,14 +22,14 @@ struct FileDimInfo FileDimInfo() {} - FileDimInfo(const std::string& name) : name(name), extraDim(false) + FileDimInfo(const std::string& name) : name(name) {} std::string name; - pdal::Dimension::Type type; - int offset; - pdal::Dimension::Id dim; - bool extraDim; + pdal::Dimension::Type type {pdal::Dimension::Type::None}; + int offset {0}; + pdal::Dimension::Id dim {pdal::Dimension::Id::Unknown}; + bool extraDim {false}; }; using DimInfoList = std::vector; diff --git a/external/pdal_wrench/tile/README.md b/external/pdal_wrench/tile/README.md new file mode 100644 index 000000000000..266b6f62879e --- /dev/null +++ b/external/pdal_wrench/tile/README.md @@ -0,0 +1,14 @@ + +Implements tiling of point clouds in two passes: +1. Read input files and write raw point data to files in a temporary directory +2. Write tiles as LAS/LAZ files from the temp point data files + +The first pass is entirely based on untwine's "epf" implementation, with only +minor changes to grid/voxel structure to accommodate tiling requirements +(fixed tile edge size, only using X/Y dimensions for tile keys, single level). +Using commit `66cafb` as a base of the fork. + +Single pass tiling can be done with "pdal tile" kernel, but it can easily run out +of open files (it keeps all output LAS/LAZ files open until it is finished). 
+ +License: GPL3+ diff --git a/external/pdal_wrench/to_raster.cpp b/external/pdal_wrench/to_raster.cpp index 76b8d60daf89..3467a55e1de3 100644 --- a/external/pdal_wrench/to_raster.cpp +++ b/external/pdal_wrench/to_raster.cpp @@ -83,7 +83,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, double r std::vector readers; for (const std::string &f : tile->inputFilenames) { - readers.push_back(&manager->makeReader(f, "")); + readers.push_back(&makeReader(manager.get(), f)); } std::vector last = readers; diff --git a/external/pdal_wrench/to_vector.cpp b/external/pdal_wrench/to_vector.cpp index 3bdd82b45e01..6ba04944478b 100644 --- a/external/pdal_wrench/to_vector.cpp +++ b/external/pdal_wrench/to_vector.cpp @@ -53,7 +53,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, const st { std::unique_ptr manager( new PipelineManager ); - Stage& r = manager->makeReader( tile->inputFilenames[0], ""); + Stage& r = makeReader(manager.get(), tile->inputFilenames[0]); Stage *last = &r; diff --git a/external/pdal_wrench/translate.cpp b/external/pdal_wrench/translate.cpp index 2d92948d1720..7055f48f6b24 100644 --- a/external/pdal_wrench/translate.cpp +++ b/external/pdal_wrench/translate.cpp @@ -40,6 +40,9 @@ void Translate::addArgs() programArgs.add("transform-coord-op", "Details on how to do the transform of coordinates when --transform-crs is used. " "It can be a PROJ pipeline or a WKT2 CoordinateOperation. " "When not specified, PROJ will pick the default transform.", transformCoordOp); + programArgs.add("transform-matrix", "A whitespace-delimited transformation matrix. " + "The matrix is assumed to be presented in row-major order. " + "Only matrices with sixteen elements are allowed.", transformMatrix); } bool Translate::checkArgs() @@ -72,7 +75,7 @@ bool Translate::checkArgs() } -static std::unique_ptr pipeline(ParallelJobInfo *tile, std::string assignCrs, std::string transformCrs, std::string transformCoordOp) +static std::unique_ptr pipeline(ParallelJobInfo *tile, std::string assignCrs, std::string transformCrs, std::string transformCoordOp, std::string transformMatrix) { std::unique_ptr manager( new PipelineManager ); @@ -80,7 +83,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, std::str if (!assignCrs.empty()) reader_opts.add(pdal::Option("override_srs", assignCrs)); - Stage& r = manager->makeReader( tile->inputFilenames[0], "", reader_opts); + Stage& r = makeReader(manager.get(), tile->inputFilenames[0], reader_opts); Stage *last = &r; @@ -126,6 +129,14 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, std::str last = reproject; } + if (!transformMatrix.empty()) + { + Options matrix_opts; + matrix_opts.add(pdal::Option("matrix", transformMatrix)); + Stage* matrixTransform = &manager->makeFilter( "filters.transformation", *last, matrix_opts); + last = matrixTransform; + } + pdal::Options writer_opts; if (!reproject) { @@ -142,7 +153,7 @@ static std::unique_ptr pipeline(ParallelJobInfo *tile, std::str writer_opts.add(pdal::Option("offset_z", "auto")); } - (void)manager->makeWriter( tile->outputFilename, "", *last, writer_opts); + makeWriter(manager.get(), tile->outputFilename, last, writer_opts); return manager; } @@ -178,7 +189,7 @@ void Translate::preparePipelines(std::vector>& tileOutputFiles.push_back(tile.outputFilename); - pipelines.push_back(pipeline(&tile, assignCrs, transformCrs, transformCoordOp)); + pipelines.push_back(pipeline(&tile, assignCrs, transformCrs, transformCoordOp, transformMatrix)); } } else @@ -190,7 +201,7 @@ void 
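
The new --transform-matrix option maps straight onto PDAL's filters.transformation, which expects 16 whitespace-separated values in row-major order. A sketch applying an example matrix that shifts points by +10 in X and +100 in Z (file names and values are placeholders):

```cpp
#include <pdal/PipelineManager.hpp>
#include <pdal/Options.hpp>
#include <pdal/Stage.hpp>

int main()
{
    pdal::PipelineManager mgr;
    pdal::Stage& reader = mgr.makeReader("input.laz", "");

    // 4x4 row-major matrix, 16 whitespace-separated values:
    // last column of the first three rows holds the translation
    pdal::Options xformOpts;
    xformOpts.add(pdal::Option("matrix",
        "1 0 0 10  0 1 0 0  0 0 1 100  0 0 0 1"));
    pdal::Stage& xform = mgr.makeFilter("filters.transformation", reader, xformOpts);

    pdal::Options writerOpts;
    writerOpts.add(pdal::Option("forward", "all"));
    writerOpts.add(pdal::Option("offset_x", "auto"));
    writerOpts.add(pdal::Option("offset_y", "auto"));
    writerOpts.add(pdal::Option("offset_z", "auto"));
    mgr.makeWriter("shifted.laz", "", xform, writerOpts);

    mgr.execute();
    return 0;
}
```
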
Translate::preparePipelines(std::vector>& ParallelJobInfo tile(ParallelJobInfo::Single, BOX2D(), filterExpression, filterBounds); tile.inputFilenames.push_back(inputFile); tile.outputFilename = outputFile; - pipelines.push_back(pipeline(&tile, assignCrs, transformCrs, transformCoordOp)); + pipelines.push_back(pipeline(&tile, assignCrs, transformCrs, transformCoordOp, transformMatrix)); } } @@ -199,28 +210,5 @@ void Translate::finalize(std::vector>&) if (tileOutputFiles.empty()) return; - std::vector args; - args.push_back("--output=" + outputFile); - for (std::string f : tileOutputFiles) - args.push_back(f); - - if (ends_with(outputFile, ".vpc")) - { - // now build a new output VPC - buildVpc(args); - } - else - { - // merge all the output files into a single file - Merge merge; - // for copc set isStreaming to false - if (ends_with(outputFile, ".copc.laz")) - { - merge.isStreaming = false; - } - runAlg(args, merge); - - // remove files as they are not needed anymore - they are merged - removeFiles(tileOutputFiles, true); - } + buildOutput(outputFile, tileOutputFiles); } diff --git a/external/pdal_wrench/utils.cpp b/external/pdal_wrench/utils.cpp index 23909e9d965e..83159b9acb27 100644 --- a/external/pdal_wrench/utils.cpp +++ b/external/pdal_wrench/utils.cpp @@ -23,6 +23,9 @@ #include +#include "vpc.hpp" +#include "alg.hpp" + using namespace pdal; @@ -119,14 +122,29 @@ void runPipelineParallel(point_count_t totalPoints, bool isStreaming, std::vecto p.add([pipeline]() { MyTable table(CHUNK_SIZE); - pipeline->executeStream(table); - + try + { + pipeline->executeStream(table); + } + catch ( pdal::pdal_error& e ) + { + std::cerr << "Error in wrench execution: " << e.what() << std::endl; + std::exit(EXIT_FAILURE); + } }); } else { p.add([pipeline, &pipelines, i]() { - pipeline->execute(); + try + { + pipeline->execute(); + } + catch ( pdal::pdal_error& e ) + { + std::cerr << "Error in wrench execution: " << e.what() << std::endl; + std::exit(EXIT_FAILURE); + } pipelines[i].reset(); // to free the point table and views (meshes, rasters) sProgressBar.add(); }); @@ -262,3 +280,79 @@ BOX2D intersectTileBoxWithFilterBox(const BOX2D &tileBox, const BOX2D &filterBox return BOX2D(); // invalid box } } + +pdal::Stage &makeReader(pdal::PipelineManager *manager, const std::string &inputFile, pdal::Options options) +{ + pdal::Stage &reader = manager->makeReader( inputFile, "" ); + + pdal::Options reader_opts; + + // for LAS/LAZ files if the version 1.2, the extra dimensions will not be read, + // need to enable use_eb_vlr to read those dimensions + if (!ends_with(inputFile, ".copc.laz") && (ends_with(inputFile, ".laz") || ends_with(inputFile, ".las"))) + { + reader_opts.add(pdal::Option("use_eb_vlr", true)); + } + + reader_opts.add(options); + + reader.addOptions(reader_opts); + + return reader; +} + +pdal::Stage &makeWriter(pdal::PipelineManager *manager, const std::string &outputFile, pdal::Stage *parent, pdal::Options options) +{ + pdal::Stage *writerPtr = nullptr; + if (parent) + { + writerPtr = &manager->makeWriter(outputFile, "", *parent); + } + else + { + writerPtr = &manager->makeWriter(outputFile, ""); + } + + pdal::Stage &writer = *writerPtr; + + // these are for writers.las and writers.copc to forward all dimensions + // if other writers are to be supported, this needs to be adjusted or make specific based on outputFile extension + pdal::Options writer_opts; + writer_opts.add(pdal::Option("forward", "all")); + writer_opts.add(pdal::Option("extra_dims", "all")); + + writer_opts.add(options); + + 
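
The new try/catch blocks in runPipelineParallel above rely on pdal::pdal_error, the exception type PDAL stages throw. A minimal reproduction outside the thread pool (the input file name is deliberately bogus so execute() throws):

```cpp
#include <cstdlib>
#include <iostream>

#include <pdal/PipelineManager.hpp>
#include <pdal/pdal_types.hpp>

int main()
{
    pdal::PipelineManager mgr;
    mgr.makeReader("does_not_exist.laz", "");

    try
    {
        mgr.execute();
    }
    catch (pdal::pdal_error& e)
    {
        // same handling as the worker lambdas above
        std::cerr << "Error in wrench execution: " << e.what() << std::endl;
        std::exit(EXIT_FAILURE);
    }
    return 0;
}
```
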
writer.addOptions(writer_opts); + + return writer; +} + +void buildOutput(std::string outputFile, std::vector &tileOutputFiles) +{ + std::vector args; + args.push_back("--output=" + outputFile); + for (std::string f : tileOutputFiles) + args.push_back(f); + + if (ends_with(outputFile, ".vpc")) + { + // now build a new output VPC + buildVpc(args); + } + else + { + // merge all the output files into a single file + Merge merge; + // for copc set isStreaming to false + if (ends_with(outputFile, ".copc.laz")) + { + merge.isStreaming = false; + } + + runAlg(args, merge); + + // remove files as they are not needed anymore - they are merged + removeFiles(tileOutputFiles, true); + } +} \ No newline at end of file diff --git a/external/pdal_wrench/utils.hpp b/external/pdal_wrench/utils.hpp index e919b80b71d0..4249c38086b8 100644 --- a/external/pdal_wrench/utils.hpp +++ b/external/pdal_wrench/utils.hpp @@ -49,9 +49,9 @@ struct Tiling // where N,M are some integer values struct TileAlignment { - double originX; - double originY; - double tileSize; + double originX {-1}; + double originY {-1}; + double tileSize {1000}; // returns tiling that fully covers given bounding box, using this tile alignment Tiling coverBounds(const BOX2D &box) const @@ -259,3 +259,19 @@ inline std::string join_strings(const std::vector& list, char delim bool rasterTilesToCog(const std::vector &inputFiles, const std::string &outputFile); + +/** + * Create reader stage with some default options. + */ +pdal::Stage &makeReader( pdal::PipelineManager *manager, const std::string &inputFile, pdal::Options options = pdal::Options() ); + +/** + * Create writer stage with some default options. + */ +pdal::Stage &makeWriter(pdal::PipelineManager *manager, const std::string &outputFile, pdal::Stage *parent, pdal::Options options = pdal::Options() ); + +/** + * Handle saving output for multiple tiles if the output is VPC or the data need to be merged. 
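
The makeReader/makeWriter helpers above encode a few defaults: use_eb_vlr so extra-byte dimensions are read from non-COPC LAS/LAZ files, and forward=all plus extra_dims=all so header fields and extra dimensions survive the round trip. The same effect with a plain PipelineManager looks roughly like this (file names are placeholders):

```cpp
#include <pdal/PipelineManager.hpp>
#include <pdal/Options.hpp>
#include <pdal/Stage.hpp>

int main()
{
    pdal::PipelineManager mgr;

    // without use_eb_vlr, extra-byte dimensions in older LAS files may be skipped
    pdal::Options readerOpts;
    readerOpts.add(pdal::Option("use_eb_vlr", true));
    pdal::Stage& reader = mgr.makeReader("with_extra_dims.las", "", readerOpts);

    // forward=all keeps header metadata, extra_dims=all writes the extra dimensions back out
    pdal::Options writerOpts;
    writerOpts.add(pdal::Option("forward", "all"));
    writerOpts.add(pdal::Option("extra_dims", "all"));
    mgr.makeWriter("copy.las", "", reader, writerOpts);

    mgr.execute();
    return 0;
}
```
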
+ */ +void buildOutput(std::string outputFile, std::vector &tileOutputFiles); + diff --git a/external/pdal_wrench/vpc.cpp b/external/pdal_wrench/vpc.cpp index 70dac07cefbc..dd78477743cc 100644 --- a/external/pdal_wrench/vpc.cpp +++ b/external/pdal_wrench/vpc.cpp @@ -87,83 +87,91 @@ bool VirtualPointCloud::read(std::string filename) std::set vpcCrsWkt; - for (auto& f : data["features"]) + try { - if (!f.contains("type") || f["type"] != "Feature" || - !f.contains("stac_version") || - !f.contains("assets") || !f["assets"].is_object() || - !f.contains("properties") || !f["properties"].is_object()) - { - std::cerr << "Malformed STAC item: " << f << std::endl; - continue; - } - - if (f["stac_version"] != "1.0.0") + for (auto& f : data["features"]) { - std::cerr << "Unsupported STAC version: " << f["stac_version"] << std::endl; - continue; - } + if (!f.contains("type") || f["type"] != "Feature" || + !f.contains("stac_version") || + !f.contains("assets") || !f["assets"].is_object() || + !f.contains("properties") || !f["properties"].is_object()) + { + std::cerr << "Malformed STAC item: " << f << std::endl; + continue; + } - nlohmann::json firstAsset = *f["assets"].begin(); + if (f["stac_version"] != "1.0.0") + { + std::cerr << "Unsupported STAC version: " << f["stac_version"] << std::endl; + continue; + } - File vpcf; - vpcf.filename = firstAsset["href"]; - vpcf.count = f["properties"]["pc:count"]; - vpcf.crsWkt = f["properties"]["proj:wkt2"]; - vpcCrsWkt.insert(vpcf.crsWkt); + nlohmann::json firstAsset = *f["assets"].begin(); - // read boundary geometry - nlohmann::json nativeGeometry = f["properties"]["proj:geometry"]; - std::stringstream sstream; - sstream << std::setw(2) << nativeGeometry << std::endl; - std::string wkt = sstream.str(); - pdal::Geometry nativeGeom(sstream.str()); - vpcf.boundaryWkt = nativeGeom.wkt(); + File vpcf; + vpcf.filename = firstAsset["href"]; + vpcf.count = f["properties"]["pc:count"]; + vpcf.crsWkt = f["properties"]["proj:wkt2"]; + vpcCrsWkt.insert(vpcf.crsWkt); - nlohmann::json nativeBbox = f["properties"]["proj:bbox"]; - vpcf.bbox = BOX3D( - nativeBbox[0].get(), nativeBbox[1].get(), nativeBbox[2].get(), - nativeBbox[3].get(), nativeBbox[4].get(), nativeBbox[5].get() ); + // read boundary geometry + nlohmann::json nativeGeometry = f["properties"]["proj:geometry"]; + std::stringstream sstream; + sstream << std::setw(2) << nativeGeometry << std::endl; + std::string wkt = sstream.str(); + pdal::Geometry nativeGeom(sstream.str()); + vpcf.boundaryWkt = nativeGeom.wkt(); - if (vpcf.filename.substr(0, 2) == "./") - { - // resolve relative path - vpcf.filename = fs::weakly_canonical(filenameParent / vpcf.filename).string(); - } + nlohmann::json nativeBbox = f["properties"]["proj:bbox"]; + vpcf.bbox = BOX3D( + nativeBbox[0].get(), nativeBbox[1].get(), nativeBbox[2].get(), + nativeBbox[3].get(), nativeBbox[4].get(), nativeBbox[5].get() ); - for (auto &schemaItem : f["properties"]["pc:schemas"]) - { - vpcf.schema.push_back(VirtualPointCloud::SchemaItem(schemaItem["name"], schemaItem["type"], schemaItem["size"].get())); - } + if (vpcf.filename.substr(0, 2) == "./") + { + // resolve relative path + vpcf.filename = fs::weakly_canonical(filenameParent / vpcf.filename).string(); + } - // read stats - for (auto &statsItem : f["properties"]["pc:statistics"]) - { - vpcf.stats.push_back(VirtualPointCloud::StatsItem( - statsItem["name"], - statsItem["position"], - statsItem["average"], - statsItem["count"], - statsItem["maximum"], - statsItem["minimum"], - statsItem["stddev"], - 
statsItem["variance"])); - } + for (auto &schemaItem : f["properties"]["pc:schemas"]) + { + vpcf.schema.push_back(VirtualPointCloud::SchemaItem(schemaItem["name"], schemaItem["type"], schemaItem["size"].get())); + } - // read overview file (if any, expecting at most one) - // this logic is very basic, we should be probably checking roles of assets - if (f["assets"].contains("overview")) - { - vpcf.overviewFilename = f["assets"]["overview"]["href"]; + // read stats + for (auto &statsItem : f["properties"]["pc:statistics"]) + { + vpcf.stats.push_back(VirtualPointCloud::StatsItem( + statsItem["name"], + statsItem["position"], + statsItem["average"], + statsItem["count"], + statsItem["maximum"], + statsItem["minimum"], + statsItem["stddev"], + statsItem["variance"])); + } - if (vpcf.overviewFilename.substr(0, 2) == "./") + // read overview file (if any, expecting at most one) + // this logic is very basic, we should be probably checking roles of assets + if (f["assets"].contains("overview")) { - // resolve relative path - vpcf.overviewFilename = fs::weakly_canonical(filenameParent / vpcf.overviewFilename).string(); + vpcf.overviewFilename = f["assets"]["overview"]["href"]; + + if (vpcf.overviewFilename.substr(0, 2) == "./") + { + // resolve relative path + vpcf.overviewFilename = fs::weakly_canonical(filenameParent / vpcf.overviewFilename).string(); + } } - } - files.push_back(vpcf); + files.push_back(vpcf); + } + } + catch ( nlohmann::detail::invalid_iterator& e ) + { + std::cerr << "Invalid 'features' value in a VPC file: " << e.what() << std::endl; + return false; } if (vpcCrsWkt.size() == 1) @@ -563,7 +571,7 @@ void buildVpc(std::vector args) { std::unique_ptr manager( new PipelineManager ); - Stage* last = &manager->makeReader(f.filename, ""); + Stage* last = &makeReader(manager.get(), f.filename); if (boundaries) { pdal::Options hexbin_opts; @@ -590,9 +598,7 @@ void buildVpc(std::vector args) std::string overviewOutput = overviewFilenameBase + "-overview-tmp-" + std::to_string(++overviewCounter) + ".las"; overviewTempFiles.push_back(overviewOutput); - pdal::Options writer_opts; - writer_opts.add(pdal::Option("forward", "all")); // TODO: maybe we could use lower scale than the original - manager->makeWriter(overviewOutput, "", *last, writer_opts); + makeWriter(manager.get(), overviewOutput, last); } pipelines.push_back(std::move(manager)); @@ -623,7 +629,7 @@ void buildVpc(std::vector args) for (const std::string &overviewTempFile : overviewTempFiles) { - Stage& reader = manager->makeReader(overviewTempFile, ""); + Stage& reader = makeReader(manager.get(), overviewTempFile); merge.setInput(reader); } diff --git a/src/providers/pdal/CMakeLists.txt b/src/providers/pdal/CMakeLists.txt index 05c49ec6cfa7..edcef5b320b9 100644 --- a/src/providers/pdal/CMakeLists.txt +++ b/src/providers/pdal/CMakeLists.txt @@ -155,6 +155,9 @@ set(PDAL_WRENCH_SRCS ${CMAKE_SOURCE_DIR}/external/pdal_wrench/translate.cpp ${CMAKE_SOURCE_DIR}/external/pdal_wrench/utils.cpp ${CMAKE_SOURCE_DIR}/external/pdal_wrench/vpc.cpp + ${CMAKE_SOURCE_DIR}/external/pdal_wrench/classify_ground.cpp + ${CMAKE_SOURCE_DIR}/external/pdal_wrench/filter_noise.cpp + ${CMAKE_SOURCE_DIR}/external/pdal_wrench/height_above_ground.cpp ${CMAKE_SOURCE_DIR}/external/pdal_wrench/tile/tile.cpp ${CMAKE_SOURCE_DIR}/external/pdal_wrench/tile/BufferCache.cpp