diff --git a/CHANGELOG.md b/CHANGELOG.md index 49a30906..01587ab4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,24 @@ All notable changes to PGS will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Dates are *YYYY-MM-DD*. +## **2.2** *(2025-xx-xx)* + +### Added +* `fixBrokenFaces()` to `PGS_Meshing`. Repairs broken faces in near-coverage linework using endpoint-only snapping, then polygonises the result. +* `polygonize()` to `PGS_Processing`. Finds polygonal faces from the given shape's linework. +* `softCells()` to `PGS_Tiling`. Generates a softened (curved) version of a tiling using the SoftCells edge-bending algorithm. + +### Changes +* These methods in `PGS_Meshing` are more performant and robust: `urquhartFaces()`, `gabrielFaces()`, `spannerFaces()`, `relativeNeighborFaces()`, `edgeCollapseQuadrangulation()`, `centroidQuadrangulation()`. + +### Fixed +* `PGS_Optimisation.closestPoint()` now returns the nearest location on the shape's **boundary** for queries inside a polygonal shape (previously returned the query point itself). +* `GENETIC` mesh-coloring algorithm now always works (and has been improved too). +* `PGS_Morphology.reducePrecision()` now supports GROUP shapes without collapsing them. + +### Removed +* `polygonizeLines()` from `PGS_Processing`, in favour of `polygonize(PShape)`. + ## **2.1** *(2025-10-04)* ### Added diff --git a/README.md b/README.md index 2750eb4f..3b240d90 100644 --- a/README.md +++ b/README.md @@ -957,8 +957,10 @@ Much of the functionality (but by no means all) is demonstrated below: Arc Division + Soft Cells + diff --git a/pom.xml b/pom.xml index d111c439..f38db8ac 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 micycle PGS - 2.1 + 2.2-SNAPSHOT Processing Geometry Suite Geometric algorithms for Processing @@ -371,7 +371,7 @@ com.github.micycle1 GeoBlitz - 0.9 + 0.9.1 diff --git a/resources/tiling/softCells.gif b/resources/tiling/softCells.gif new file mode 100644 index 00000000..08d83925 Binary files /dev/null and b/resources/tiling/softCells.gif differ diff --git a/src/main/java/micycle/pgs/PGS_Coloring.java b/src/main/java/micycle/pgs/PGS_Coloring.java index c44b1312..43971836 100644 --- a/src/main/java/micycle/pgs/PGS_Coloring.java +++ b/src/main/java/micycle/pgs/PGS_Coloring.java @@ -254,7 +254,7 @@ private static Coloring findColoring(Collection shapes, Coloring coloring = new ColorRefinementAlgorithm<>(graph).getColoring(); break; case GENETIC : - coloring = new GeneticColoring<>(graph).getColoring(); + coloring = new GeneticColoring<>(graph, SEED).getColoring(); break; case RLF_BRUTE_FORCE_4COLOR : int iterations = 0; diff --git a/src/main/java/micycle/pgs/PGS_Construction.java b/src/main/java/micycle/pgs/PGS_Construction.java index 0299f0a4..06250433 100644 --- a/src/main/java/micycle/pgs/PGS_Construction.java +++ b/src/main/java/micycle/pgs/PGS_Construction.java @@ -1365,7 +1365,7 @@ static Polygon createCircle(double x, double y, double r, final double maxDeviat int nPts = (int) Math.ceil(2 * Math.PI / Math.acos(1 - maxDeviation / r)); nPts = Math.max(nPts, 21); // min of 21 points for tiny circles final int circumference = (int) (Math.PI * r * 2); - if (nPts > circumference * 2) { + if (nPts > circumference * 2 && circumference > 0) { // AT MOST 1 point every half pixel nPts = circumference * 2; } diff --git a/src/main/java/micycle/pgs/PGS_Meshing.java 
b/src/main/java/micycle/pgs/PGS_Meshing.java index 2297e9a4..7c245799 100644 --- a/src/main/java/micycle/pgs/PGS_Meshing.java +++ b/src/main/java/micycle/pgs/PGS_Meshing.java @@ -2,49 +2,44 @@ import static micycle.pgs.PGS_Conversion.fromPShape; import static micycle.pgs.PGS_Conversion.getChildren; +import static micycle.pgs.PGS_Conversion.toPShape; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; + import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.math3.random.RandomGenerator; import org.jgrapht.alg.connectivity.ConnectivityInspector; import org.jgrapht.alg.interfaces.MatchingAlgorithm; -import org.jgrapht.alg.interfaces.VertexColoringAlgorithm.Coloring; import org.jgrapht.alg.matching.blossom.v5.KolmogorovWeightedMatching; import org.jgrapht.alg.matching.blossom.v5.KolmogorovWeightedPerfectMatching; import org.jgrapht.alg.matching.blossom.v5.ObjectiveSense; -import org.jgrapht.alg.spanning.GreedyMultiplicativeSpanner; import org.jgrapht.alg.util.NeighborCache; -import org.jgrapht.graph.AbstractBaseGraph; import org.jgrapht.graph.DefaultEdge; import org.jgrapht.graph.SimpleGraph; -import org.locationtech.jts.algorithm.Orientation; import org.locationtech.jts.coverage.CoverageSimplifier; import org.locationtech.jts.coverage.CoverageValidator; import org.locationtech.jts.geom.Coordinate; -import org.locationtech.jts.geom.CoordinateList; import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.geom.Polygon; -import org.locationtech.jts.index.strtree.STRtree; import org.locationtech.jts.noding.SegmentString; -import org.locationtech.jts.operation.overlayng.OverlayNG; +import org.locationtech.jts.operation.polygonize.Polygonizer; import org.tinfour.common.IConstraint; import org.tinfour.common.IIncrementalTin; import org.tinfour.common.IQuadEdge; import org.tinfour.common.SimpleTriangle; import org.tinfour.common.Vertex; import org.tinfour.utils.TriangleCollector; -import org.tinspin.index.PointMap; -import org.tinspin.index.kdtree.KDTree; +import com.github.micycle1.geoblitz.EndpointSnapper; import com.vividsolutions.jcs.conflate.coverage.CoverageCleaner; import com.vividsolutions.jcs.conflate.coverage.CoverageCleaner.Parameters; import com.vividsolutions.jump.feature.FeatureCollection; @@ -56,10 +51,10 @@ import micycle.pgs.PGS_Conversion.PShapeData; import micycle.pgs.color.Colors; import micycle.pgs.commons.AreaMerge; +import micycle.pgs.commons.EdgePrunedFaces; import micycle.pgs.commons.IncrementalTinDual; import micycle.pgs.commons.PEdge; import micycle.pgs.commons.PMesh; -import micycle.pgs.commons.RLFColoring; import micycle.pgs.commons.SpiralQuadrangulation; import processing.core.PConstants; import processing.core.PShape; @@ -110,32 +105,7 @@ private PGS_Meshing() { * @see #gabrielFaces(IIncrementalTin, boolean) */ public static PShape urquhartFaces(final IIncrementalTin triangulation, final boolean preservePerimeter) { - final HashSet edges = PGS.makeHashSet(triangulation.getMaximumEdgeAllocationIndex()); - final HashSet uniqueLongestEdges = PGS.makeHashSet(triangulation.getMaximumEdgeAllocationIndex()); - - final boolean notConstrained = triangulation.getConstraints().isEmpty(); - - TriangleCollector.visitSimpleTriangles(triangulation, t -> { - final IConstraint constraint = 
t.getContainingRegion(); - if (notConstrained || (constraint != null && constraint.definesConstrainedRegion())) { - edges.add(t.getEdgeA().getBaseReference()); - edges.add(t.getEdgeB().getBaseReference()); - edges.add(t.getEdgeC().getBaseReference()); - final IQuadEdge longestEdge = findLongestEdge(t).getBaseReference(); - if (!preservePerimeter || (preservePerimeter && !longestEdge.isConstraintRegionBorder())) { - uniqueLongestEdges.add(longestEdge); - } - } - }); - - edges.removeAll(uniqueLongestEdges); - - final Collection meshEdges = new ArrayList<>(edges.size()); - edges.forEach(edge -> meshEdges.add(new PEdge(edge.getA().x, edge.getA().y, edge.getB().x, edge.getB().y))); - - PShape mesh = PGS.polygonizeNodedEdges(meshEdges); - - return removeHoles(mesh, triangulation); + return EdgePrunedFaces.urquhartFaces(triangulation, preservePerimeter); } /** @@ -162,42 +132,7 @@ public static PShape urquhartFaces(final IIncrementalTin triangulation, final bo * @see #urquhartFaces(IIncrementalTin, boolean) */ public static PShape gabrielFaces(final IIncrementalTin triangulation, final boolean preservePerimeter) { - final HashSet edges = new HashSet<>(); - final HashSet vertices = new HashSet<>(); - - final boolean notConstrained = triangulation.getConstraints().isEmpty(); - TriangleCollector.visitSimpleTriangles(triangulation, t -> { - final IConstraint constraint = t.getContainingRegion(); - if (notConstrained || (constraint != null && constraint.definesConstrainedRegion())) { - edges.add(t.getEdgeA().getBaseReference()); // add edge to set - edges.add(t.getEdgeB().getBaseReference()); // add edge to set - edges.add(t.getEdgeC().getBaseReference()); // add edge to set - vertices.add(t.getVertexA()); - vertices.add(t.getVertexB()); - vertices.add(t.getVertexC()); - } - }); - - final PointMap tree = KDTree.create(2); - vertices.forEach(v -> tree.insert(new double[] { v.x, v.y }, v)); - - final HashSet nonGabrielEdges = new HashSet<>(); // base references to edges that should be removed - edges.forEach(edge -> { - final double[] midpoint = midpoint(edge); - final Vertex near = tree.query1nn(midpoint).value(); - if (near != edge.getA() && near != edge.getB()) { - if (!preservePerimeter || (preservePerimeter && !edge.isConstraintRegionBorder())) { // don't remove constraint borders (holes) - nonGabrielEdges.add(edge); // base reference - } - } - }); - edges.removeAll(nonGabrielEdges); - - final Collection meshEdges = new ArrayList<>(edges.size()); - edges.forEach(edge -> meshEdges.add(new PEdge(edge.getA().x, edge.getA().y, edge.getB().x, edge.getB().y))); - - PShape mesh = PGS.polygonizeNodedEdges(meshEdges); - return removeHoles(mesh, triangulation); + return EdgePrunedFaces.gabrielFaces(triangulation, preservePerimeter); } /** @@ -217,37 +152,7 @@ public static PShape gabrielFaces(final IIncrementalTin triangulation, final boo * @since 1.3.0 */ public static PShape relativeNeighborFaces(final IIncrementalTin triangulation, final boolean preservePerimeter) { - SimpleGraph graph = PGS_Triangulation.toTinfourGraph(triangulation); - NeighborCache cache = new NeighborCache<>(graph); - - Set edges = new HashSet<>(graph.edgeSet()); - - /* - * If any vertex is nearer to both vertices of an edge, than the length of the - * edge, this edge does not belong in the RNG. 
- */ - graph.edgeSet().forEach(e -> { - double l = e.getLength(); - cache.neighborsOf(e.getA()).forEach(n -> { - if (Math.max(n.getDistance(e.getA()), n.getDistance(e.getB())) < l) { - if (!preservePerimeter || (preservePerimeter && !e.isConstraintRegionBorder())) { - edges.remove(e); - } - } - }); - cache.neighborsOf(e.getB()).forEach(n -> { - if (Math.max(n.getDistance(e.getA()), n.getDistance(e.getB())) < l) { - if (!preservePerimeter || (preservePerimeter && !e.isConstraintRegionBorder())) { - edges.remove(e); - } - } - }); - }); - - List edgesOut = edges.stream().map(PGS_Triangulation::toPEdge).collect(Collectors.toList()); - - PShape mesh = PGS.polygonizeNodedEdges(edgesOut); - return removeHoles(mesh, triangulation); + return EdgePrunedFaces.relativeNeighborFaces(triangulation, preservePerimeter); } /** @@ -265,26 +170,7 @@ public static PShape relativeNeighborFaces(final IIncrementalTin triangulation, * @since 1.3.0 */ public static PShape spannerFaces(final IIncrementalTin triangulation, int k, final boolean preservePerimeter) { - SimpleGraph graph = PGS_Triangulation.toGraph(triangulation); - if (graph.edgeSet().isEmpty()) { - return new PShape(); - } - - k = Math.max(2, k); // min(2) since k=1 returns triangulation - GreedyMultiplicativeSpanner spanner = new GreedyMultiplicativeSpanner<>(graph, k); - List spannerEdges = spanner.getSpanner().stream().collect(Collectors.toList()); - if (preservePerimeter) { - if (triangulation.getConstraints().isEmpty()) { // does not have constraints - spannerEdges.addAll(triangulation.getPerimeter().stream().map(PGS_Triangulation::toPEdge).collect(Collectors.toList())); - } else { // has constraints - spannerEdges.addAll(triangulation.getEdges().stream().filter(IQuadEdge::isConstraintRegionBorder).map(PGS_Triangulation::toPEdge) - .collect(Collectors.toList())); - } - } - - PShape mesh = PGS.polygonizeNodedEdges(spannerEdges); - - return removeHoles(mesh, triangulation); + return EdgePrunedFaces.spannerFaces(triangulation, k, preservePerimeter); } /** @@ -400,57 +286,7 @@ public static PShape splitQuadrangulation(final IIncrementalTin triangulation) { * similar approach, but faster */ public static PShape edgeCollapseQuadrangulation(final IIncrementalTin triangulation, final boolean preservePerimeter) { - /*- - * From 'Fast unstructured quadrilateral mesh generation'. - * A better coloring approach is given in 'Face coloring in unstructured CFD codes'. - * - * First partition the edges of the triangular mesh into three groups such that - * no triangle has two edges of the same color (find groups by reducing to a - * graph-coloring). - * Then obtain an all-quadrilateral mesh by removing all edges of *one* - * particular color. 
- */ - final boolean unconstrained = triangulation.getConstraints().isEmpty(); - final AbstractBaseGraph graph = new SimpleGraph<>(DefaultEdge.class); - TriangleCollector.visitSimpleTriangles(triangulation, t -> { - final IConstraint constraint = t.getContainingRegion(); - if (unconstrained || (constraint != null && constraint.definesConstrainedRegion())) { - graph.addVertex(t.getEdgeA().getBaseReference()); - graph.addVertex(t.getEdgeB().getBaseReference()); - graph.addVertex(t.getEdgeC().getBaseReference()); - - graph.addEdge(t.getEdgeA().getBaseReference(), t.getEdgeB().getBaseReference()); - graph.addEdge(t.getEdgeA().getBaseReference(), t.getEdgeC().getBaseReference()); - graph.addEdge(t.getEdgeB().getBaseReference(), t.getEdgeC().getBaseReference()); - } - }); - - Coloring coloring = new RLFColoring<>(graph, 1337).getColoring(); - - final HashSet perimeter = new HashSet<>(triangulation.getPerimeter()); - if (!unconstrained) { - perimeter.clear(); // clear, the perimeter of constrained tin is unaffected by the constraint - } - - final Collection meshEdges = new ArrayList<>(); - coloring.getColors().forEach((edge, color) -> { - /* - * "We can remove the edges of any one of the colors, however a convenient - * choice is the one that leaves the fewest number of unmerged boundary - * triangles". -- ideal, but not implemented here... - */ - // NOTE could now apply Topological optimization, as given in paper. - if ((color < 2) || (preservePerimeter && (edge.isConstraintRegionBorder() || perimeter.contains(edge)))) { - meshEdges.add(new PEdge(edge.getA().x, edge.getA().y, edge.getB().x, edge.getB().y)); - } - }); - - PShape quads = PGS.polygonizeNodedEdges(meshEdges); - if (triangulation.getConstraints().size() < 2) { // assume constraint 1 is the boundary (not a hole) - return quads; - } else { - return removeHoles(quads, triangulation); - } + return EdgePrunedFaces.edgeCollapseQuadrangulation(triangulation, preservePerimeter); } /** @@ -471,33 +307,70 @@ public static PShape edgeCollapseQuadrangulation(final IIncrementalTin triangula * @return a GROUP PShape, where each child shape is one quadrangle * @since 1.2.0 */ - public static PShape centroidQuadrangulation(final IIncrementalTin triangulation, final boolean preservePerimeter) { - final boolean unconstrained = triangulation.getConstraints().isEmpty(); - final HashSet edges = new HashSet<>(); - TriangleCollector.visitSimpleTriangles(triangulation, t -> { - final IConstraint constraint = t.getContainingRegion(); - if (unconstrained || (constraint != null && constraint.definesConstrainedRegion())) { - Vertex centroid = centroid(t); - edges.add(new PEdge(centroid.getX(), centroid.getY(), t.getVertexA().x, t.getVertexA().y)); - edges.add(new PEdge(centroid.getX(), centroid.getY(), t.getVertexB().x, t.getVertexB().y)); - edges.add(new PEdge(centroid.getX(), centroid.getY(), t.getVertexC().x, t.getVertexC().y)); + public static PShape centroidQuadrangulation(final IIncrementalTin tin, final boolean preservePerimeter) { + final boolean unconstrained = tin.getConstraints().isEmpty(); + final var gf = PGS.GEOM_FACTORY; + + // Collect accepted triangles: centroids + base-edge adjacency + final List centroids = new ArrayList<>(); + final Map edgeAdj = new IdentityHashMap<>(); + + TriangleCollector.visitSimpleTriangles(tin, t -> { + final IConstraint c = t.getContainingRegion(); + if (!(unconstrained || (c != null && c.definesConstrainedRegion()))) { + return; } + + final int tid = centroids.size(); + centroids.add(new double[] { (t.getVertexA().x 
+ t.getVertexB().x + t.getVertexC().x) / 3.0, + (t.getVertexA().y + t.getVertexB().y + t.getVertexC().y) / 3.0 }); + + addAdj(edgeAdj, t.getEdgeA().getBaseReference(), tid); + addAdj(edgeAdj, t.getEdgeB().getBaseReference(), tid); + addAdj(edgeAdj, t.getEdgeC().getBaseReference(), tid); }); - if (preservePerimeter) { - List perimeter = triangulation.getPerimeter(); - triangulation.edges().forEach(edge -> { - if (edge.isConstraintRegionBorder() || (unconstrained && perimeter.contains(edge))) { - edges.add(new PEdge(edge.getA().x, edge.getA().y, edge.getB().x, edge.getB().y)); - } - }); + if (centroids.isEmpty()) { + return new PShape(); + } + + final List out = new ArrayList<>(); + for (Map.Entry en : edgeAdj.entrySet()) { + final IQuadEdge e = en.getKey(); + final int[] inc = en.getValue(); + final Vertex A = e.getA(), B = e.getB(); + + if (inc[0] >= 0 && inc[1] >= 0) { + // Interior edge -> quad [A, c0, B, c1, A] + final double[] c0 = centroids.get(inc[0]); + final double[] c1 = centroids.get(inc[1]); + final Coordinate[] ring = new Coordinate[] { new Coordinate(A.x, A.y), new Coordinate(c0[0], c0[1]), new Coordinate(B.x, B.y), + new Coordinate(c1[0], c1[1]), new Coordinate(A.x, A.y) }; + out.add(PGS_Conversion.toPShape(gf.createPolygon(ring))); + } else if (preservePerimeter) { + // Boundary edge -> triangle [A, c, B, A] + final int tid = inc[0] >= 0 ? inc[0] : inc[1]; + final double[] c = centroids.get(tid); + final Coordinate[] tri = new Coordinate[] { new Coordinate(A.x, A.y), new Coordinate(c[0], c[1]), new Coordinate(B.x, B.y), + new Coordinate(A.x, A.y) }; + out.add(PGS_Conversion.toPShape(gf.createPolygon(tri))); + } } - final PShape quads = PGS.polygonizeNodedEdges(edges); - if (triangulation.getConstraints().size() < 2) { // assume constraint 1 is the boundary (not a hole) - return quads; + final PShape quads = PGS_Conversion.flatten(out); + return quads; + } + + private static void addAdj(Map adj, IQuadEdge base, int tid) { + int[] a = adj.get(base); + if (a == null) { + a = new int[] { -1, -1 }; + adj.put(base, a); + } + if (a[0] < 0) { + a[0] = tid; } else { - return removeHoles(quads, triangulation); + a[1] = tid; } } @@ -546,9 +419,9 @@ public static PShape matchingQuadrangulation(final IIncrementalTin triangulation Set seen = new HashSet<>(g.vertexSet()); var quads = collapsedEdges.stream().map(e -> { var t1 = g.getEdgeSource(e); - var f1 = toPShape(t1); + var f1 = triToPShape(t1); var t2 = g.getEdgeTarget(e); - var f2 = toPShape(t2); + var f2 = triToPShape(t2); seen.remove(t1); seen.remove(t2); @@ -558,82 +431,12 @@ public static PShape matchingQuadrangulation(final IIncrementalTin triangulation // include uncollapsed triangles (if any) seen.forEach(t -> { - quads.add(toPShape(t)); + quads.add(triToPShape(t)); }); return PGS_Conversion.flatten(quads); } - /** - * Removes (what should be) holes from a polygonized quadrangulation. - *

- * When the polygonizer is applied to the collapsed triangles of a - * triangulation, it cannot determine which collapsed regions represent holes in - * the quadrangulation and will consequently fill them in. The subroutine below - * restores holes/topology, detecting which polygonized face(s) are original - * holes. Note the geometry of the original hole/constraint and its associated - * polygonized face are different, since quads are polygonized, not triangles - * (hence an overlap metric is used to match candidates). - * - * @param faces faces of the quadrangulation - * @param triangulation - * @return - */ - private static PShape removeHoles(PShape faces, IIncrementalTin triangulation) { - List holes = new ArrayList<>(triangulation.getConstraints()); // copy list - if (holes.size() <= 1) { - return faces; - } - holes = holes.subList(1, holes.size()); // slice off perimeter constraint (not a hole) - - STRtree tree = new STRtree(); - holes.stream().map(constraint -> constraint.getVertices()).iterator().forEachRemaining(vertices -> { - CoordinateList coords = new CoordinateList(); // coords of constraint - vertices.forEach(v -> coords.add(new Coordinate(v.x, v.y))); - coords.closeRing(); - - if (!Orientation.isCCWArea(coords.toCoordinateArray())) { // triangulation holes are CW - Polygon polygon = PGS.GEOM_FACTORY.createPolygon(coords.toCoordinateArray()); - tree.insert(polygon.getEnvelopeInternal(), polygon); - } - }); - - List nonHoles = PGS_Conversion.getChildren(faces).parallelStream().filter(quad -> { - /* - * If quad overlaps with a hole detect whether it *is* that hole via Hausdorff - * Similarity. - */ - final Geometry g = PGS_Conversion.fromPShape(quad); - - @SuppressWarnings("unchecked") - List matches = tree.query(g.getEnvelopeInternal()); - - for (Polygon m : matches) { - try { - // PGS_ShapePredicates.overlap() inlined here - Geometry overlap = OverlayNG.overlay(m, g, OverlayNG.INTERSECTION); - double a1 = g.getArea(); - double a2 = m.getArea(); - double total = a1 + a2; - double aOverlap = overlap.getArea(); - double w1 = a1 / total; - double w2 = a2 / total; - - double similarity = w1 * (aOverlap / a1) + w2 * (aOverlap / a2); - if (similarity > 0.2) { // magic constant, unsure what the best value is - return false; // is hole; keep=false - } - } catch (Exception e) { // catch occasional noded error - continue; - } - - } - return true; // is not hole; keep=true - }).collect(Collectors.toList()); - - return PGS_Conversion.flatten(nonHoles); - } - /** * Produces a quadrangulation from a point set. The resulting quadrangulation * has a characteristic spiral pattern. @@ -981,22 +784,94 @@ static Pair, List> extractInnerEdgesAndVertices(PShape mesh return Pair.of(new ArrayList<>(inner), innerVerts); } + /** + * Repairs broken faces in near-coverage linework using endpoint-only snapping, + * then polygonises the result. + *

+ * Targets face-level defects in a line arrangement that is intended to form a
+ * valid coverage but doesn’t quite join exactly (e.g., near-misses, tiny
+ * endpoint gaps, unclosed rings). It performs endpoint-only snapping within
+ * {@code tolerance} and then polygonises the result.
+ * <ul>
+ * <li>Performs endpoint-only snapping (not general vertex snapping):
+ * <ul>
+ * <li>Only line endpoints move; interior vertices are not adjusted, and polygon
+ * vertices are never moved.</li>
+ * <li>Endpoints (and polygon vertices) within {@code tolerance} form transitive
+ * clusters.</li>
+ * <li>If a cluster contains any polygon vertex, endpoints snap to that vertex
+ * (polygons act as fixed anchors).</li>
+ * <li>If a cluster contains only endpoints, they snap mutually to the cluster
+ * mean, closing gaps where no valid nodes exist yet.</li>
+ * <li>Closed LineStrings are treated as polygons and ignored for snapping.</li>
+ * </ul>
+ * </li>
+ * <li>Polygonisation:
+ * <ul>
+ * <li>Builds faces from the snapped linework and returns a flattened shape
+ * containing faces, cut edges, and dangles.</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>
+ * Outcome: this method focuses on repairing faces from near-coverage linework.
+ * As a side effect, it often yields a valid (or more nearly valid) coverage,
+ * but it is not a general cleaner for inter-face gaps/overlaps.
+ * <p>
+ * For cleaning breaks between faces (gaps, overlaps, slivers, misaligned shared
+ * edges) in an existing coverage, use {@link #fixBreaks(PShape, double, double)
+ * fixBreaks()}.
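+ * <p>
+ * A minimal usage sketch (the {@code brokenLinework} variable is hypothetical;
+ * any PShape whose linework almost, but not exactly, closes into faces will do):
+ *
+ * <pre>{@code
+ * // snap endpoints lying within 1px of each other, then polygonise the repaired linework
+ * PShape faces = PGS_Meshing.fixBrokenFaces(brokenLinework, 1.0);
+ * shape(faces);
+ * }</pre>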

+ * + * @param coverage input coverage as a {@link PShape}; may include polygons and + * broken boundary lines + * @param tolerance maximum distance within which endpoints may be + * clustered/snapped + * @return a flattened {@link PShape} containing polygonal faces, and any + * remaining linework + * @see #fixBreaks(PShape, double, double) + * @since 2.2 + */ + @SuppressWarnings("unchecked") + public static PShape fixBrokenFaces(PShape coverage, double tolerance) { + var g = fromPShape(coverage); + EndpointSnapper snapper = new EndpointSnapper(tolerance); + var fixed = snapper.snapEndpoints(g, true); + + Polygonizer p = new Polygonizer(false); + p.add(fixed); + var polys = toPShape(p.getPolygons()); + var cuts = toPShape(p.getCutEdges()); + var dangles = toPShape(p.getDangles()); + + return PGS_Conversion.flatten(polys, cuts, dangles); + } + /** * Removes gaps and overlaps from meshes/polygon collections that are intended * to satisfy the following conditions: *
<ul>
- * <li>Vector-clean - edges between neighbouring polygons must either be
+ * <li>Vector-clean — edges between neighbouring polygons must either be
 * identical or intersect only at endpoints.</li>
- * <li>Non-overlapping - No two polygons may overlap. Equivalently, polygons
- * must be interior-disjoint.</li>
+ * <li>Non-overlapping — no two polygons may overlap (polygons are
+ * interior-disjoint).</li>
 * </ul>
 * <p>
- * It may not always be possible to perfectly clean the input.
+ * Note: This operates on breaks between faces (inter-polygon gaps,
+ * overlaps, slivers, and misaligned shared edges), not on “broken” faces / line
+ * arrangements with unclosed lines or endpoint gaps. For repairing broken faces
+ * via endpoint-only snapping, see {@link #fixBrokenFaces(PShape, double)
+ * fixBrokenFaces()}.
 * <p>
- * While this method is intended to be used to fix malformed coverages, it also
- * can be used to snap collections of disparate polygons together.
- *
- * @param coverage a GROUP shape, consisting of the polygonal faces to
+ * It may not always be possible to perfectly clean the input. While this method
+ * is intended for malformed coverages, it can also snap collections of
+ * disparate polygons together.
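+ * <p>
+ * A brief example (the {@code roughMesh} variable and both tolerance values are
+ * illustrative only):
+ *
+ * <pre>{@code
+ * // snap segments/vertices lying within 0.5px so neighbouring faces share clean edges
+ * PShape cleaned = PGS_Meshing.fixBreaks(roughMesh, 0.5, 5);
+ * }</pre>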

+ * + * @param coverage a GROUP shape consisting of the polygonal faces to * clean * @param distanceTolerance the distance below which segments and vertices are * considered to match @@ -1005,6 +880,7 @@ static Pair, List> extractInnerEdgesAndVertices(PShape mesh * @return GROUP shape whose child polygons satisfy a (hopefully) valid coverage * @since 1.3.0 * @see #findBreaks(PShape) + * @see #fixBrokenFaces(PShape, double) */ public static PShape fixBreaks(PShape coverage, double distanceTolerance, double angleTolerance) { final List geometries = PGS_Conversion.getChildren(coverage).stream().map(PGS_Conversion::fromPShape).collect(Collectors.toList()); @@ -1135,43 +1011,7 @@ private static PShape applyOriginalStyling(final PShape newMesh, final PShape ol return newMesh; } - /** - * Calculate the longest edge of a given triangle. - */ - private static IQuadEdge findLongestEdge(final SimpleTriangle t) { - if (t.getEdgeA().getLength() > t.getEdgeB().getLength()) { - if (t.getEdgeC().getLength() > t.getEdgeA().getLength()) { - return t.getEdgeC(); - } else { - return t.getEdgeA(); - } - } else { - if (t.getEdgeC().getLength() > t.getEdgeB().getLength()) { - return t.getEdgeC(); - } else { - return t.getEdgeB(); - } - } - } - - private static double[] midpoint(final IQuadEdge edge) { - final Vertex a = edge.getA(); - final Vertex b = edge.getB(); - return new double[] { (a.x + b.x) / 2d, (a.y + b.y) / 2d }; - } - - private static Vertex centroid(final SimpleTriangle t) { - final Vertex a = t.getVertexA(); - final Vertex b = t.getVertexB(); - final Vertex c = t.getVertexC(); - double x = a.x + b.x + c.x; - x /= 3; - double y = a.y + b.y + c.y; - y /= 3; - return new Vertex(x, y, 0); - } - - private static PShape toPShape(SimpleTriangle t) { + private static PShape triToPShape(SimpleTriangle t) { PVector vertexA = new PVector((float) t.getVertexA().x, (float) t.getVertexA().y); PVector vertexB = new PVector((float) t.getVertexB().x, (float) t.getVertexB().y); PVector vertexC = new PVector((float) t.getVertexC().x, (float) t.getVertexC().y); diff --git a/src/main/java/micycle/pgs/PGS_Morphology.java b/src/main/java/micycle/pgs/PGS_Morphology.java index 783e2664..f876e16b 100644 --- a/src/main/java/micycle/pgs/PGS_Morphology.java +++ b/src/main/java/micycle/pgs/PGS_Morphology.java @@ -912,7 +912,13 @@ public static PShape interpolate(PShape from, PShape to, int frames) { * @since 1.3.0 */ public static PShape reducePrecision(PShape shape, double precision) { - return toPShape(GeometryPrecisionReducer.reduce(fromPShape(shape), new PrecisionModel(-Math.max(Math.abs(precision), 1e-10)))); + var pm = new PrecisionModel(-Math.max(Math.abs(precision), 1e-10)); + if (shape.getFamily() == PShape.GROUP) { + // pointwise preserves polygon faces (doesn't merge) + return toPShape(GeometryPrecisionReducer.reducePointwise(fromPShape(shape), pm)); + } else { + return toPShape(GeometryPrecisionReducer.reduce(fromPShape(shape), pm)); + } } /** diff --git a/src/main/java/micycle/pgs/PGS_Optimisation.java b/src/main/java/micycle/pgs/PGS_Optimisation.java index d20d4adf..7a64287a 100644 --- a/src/main/java/micycle/pgs/PGS_Optimisation.java +++ b/src/main/java/micycle/pgs/PGS_Optimisation.java @@ -16,6 +16,7 @@ import org.apache.commons.lang3.tuple.Triple; import org.locationtech.jts.algorithm.MinimumAreaRectangle; import org.locationtech.jts.algorithm.MinimumBoundingCircle; +import org.locationtech.jts.algorithm.MinimumBoundingTriangle; import org.locationtech.jts.algorithm.MinimumDiameter; import 
org.locationtech.jts.algorithm.construct.LargestEmptyCircle; import org.locationtech.jts.algorithm.construct.MaximumInscribedCircle; @@ -27,6 +28,7 @@ import org.locationtech.jts.geom.Location; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; +import org.locationtech.jts.geom.Polygonal; import org.locationtech.jts.operation.distance.DistanceOp; import org.locationtech.jts.simplify.DouglasPeuckerSimplifier; import org.locationtech.jts.util.GeometricShapeFactory; @@ -46,7 +48,6 @@ import micycle.pgs.commons.MaximumInscribedRectangle; import micycle.pgs.commons.MaximumInscribedTriangle; import micycle.pgs.commons.MinimumBoundingEllipse; -import micycle.pgs.commons.MinimumBoundingTriangle; import micycle.pgs.commons.Nullable; import micycle.pgs.commons.SpiralIterator; import micycle.pgs.commons.VisibilityPolygon; @@ -522,7 +523,7 @@ public static PShape minimumBoundingEllipse(PShape shape, double errorTolerance) * @param shape */ public static PShape minimumBoundingTriangle(PShape shape) { - MinimumBoundingTriangle mbt = new MinimumBoundingTriangle(fromPShape(shape)); + var mbt = new MinimumBoundingTriangle(fromPShape(shape)); return toPShape(mbt.getTriangle()); } @@ -875,10 +876,10 @@ public static PVector closestVertex(PShape shape, PVector queryPoint) { if (vertices.isEmpty()) { return null; } - float minDistSq = Float.POSITIVE_INFINITY; + double minDistSq = Double.POSITIVE_INFINITY; PVector closest = null; for (PVector v : vertices) { - float distSq = PVector.dist(v, queryPoint); + double distSq = PGS.distanceSq(v, queryPoint); if (distSq < minDistSq) { minDistSq = distSq; closest = v; @@ -910,7 +911,7 @@ public static PVector closestVertex(PShape shape, PVector queryPoint) { */ public static PVector closestPoint(PShape shape, PVector point) { Geometry g = fromPShape(shape); - Coordinate coord = DistanceOp.nearestPoints(g, PGS.pointFromPVector(point))[0]; + Coordinate coord = DistanceOp.nearestPoints(g.getBoundary(), PGS.pointFromPVector(point))[0]; return new PVector((float) coord.x, (float) coord.y); } @@ -956,9 +957,9 @@ public static PVector closestPoint(Collection points, PVector point) { */ public static List closestPoints(PShape shape, PVector point) { Geometry g = fromPShape(shape); - ArrayList points = new ArrayList<>(); + List points = new ArrayList<>(); for (int i = 0; i < g.getNumGeometries(); i++) { - final Coordinate coord = DistanceOp.nearestPoints(g.getGeometryN(i), PGS.pointFromPVector(point))[0]; + final Coordinate coord = DistanceOp.nearestPoints(g.getGeometryN(i).getBoundary(), PGS.pointFromPVector(point))[0]; points.add(PGS.toPVector(coord)); } return points; @@ -1272,22 +1273,30 @@ public static PVector solveApollonius(PVector c1, PVector c2, PVector c3, int s1 } /** - * Computes a visibility polygon / isovist, the area visible from a given point - * in a space, considering occlusions caused by obstacles. In this case, - * obstacles comprise the line segments of input shape. + * Computes the visibility polygon (isovist): the region visible from a given + * viewpoint, with occlusions caused by the edges of the supplied shape. * - * @param obstacles shape representing obstacles, which may have any manner of - * polygon and line geometries. - * @param viewPoint view point from which to compute visibility. If the input if - * polygonal, the viewpoint may lie outside the polygon. - * @return a polygonal shape representing the visibility polygon. 
+ * @param obstacles a PShape whose edges serve as occluding obstacles; may + * contain polygons and/or lines. + * @param viewPoint the viewpoint from which visibility is computed. If the + * input if polygonal, the viewpoint may lie outside the + * polygon. + * @return a polygon representing the visible region from {@code viewPoint} * @since 1.4.0 * @see #visibilityPolygon(PShape, Collection) */ public static PShape visibilityPolygon(PShape obstacles, PVector viewPoint) { + var g = fromPShape(obstacles); + var p = PGS.pointFromPVector(viewPoint); + VisibilityPolygon vp = new VisibilityPolygon(); - vp.addGeometry(fromPShape(obstacles)); - return toPShape(vp.getIsovist(PGS.coordFromPVector(viewPoint), true)); + vp.addGeometry(g); + + /* + * Skip adding envelope only when viewpoint is in a polygon. + */ + var isovist = vp.getIsovist(p.getCoordinate(), (g instanceof Polygonal) ? !g.contains(p) : true); + return toPShape(isovist); } /** diff --git a/src/main/java/micycle/pgs/PGS_PointSet.java b/src/main/java/micycle/pgs/PGS_PointSet.java index 1df51711..f5dbdecf 100644 --- a/src/main/java/micycle/pgs/PGS_PointSet.java +++ b/src/main/java/micycle/pgs/PGS_PointSet.java @@ -1153,11 +1153,12 @@ public static List applyRandomWeights(List points, double minW * with a random weight assigned to its z-coordinate * @since 2.0 */ - public static List applyRandomWeights(List points, double minWeight, double maxWeight, long seed) { + public static List applyRandomWeights(List points, final double minWeight, final double maxWeight, final long seed) { final SplittableRandom random = new SplittableRandom(seed); return points.stream().map(p -> { p = p.copy(); - p.z = (float) random.nextDouble(minWeight, maxWeight); + var w = minWeight == maxWeight ? minWeight : random.nextDouble(minWeight, maxWeight); + p.z = (float) w; return p; }).collect(Collectors.toList()); } diff --git a/src/main/java/micycle/pgs/PGS_Processing.java b/src/main/java/micycle/pgs/PGS_Processing.java index 0042648c..6bc80843 100644 --- a/src/main/java/micycle/pgs/PGS_Processing.java +++ b/src/main/java/micycle/pgs/PGS_Processing.java @@ -888,32 +888,28 @@ public static PShape extractHoles(PShape shape) { } /** - * Finds the polygonal faces formed by a set of intersecting line segments. - * - * @param lineSegmentVertices a list of PVectors where each pair (couplet) of - * PVectors represent the start and end point of one - * line segment - * @return a GROUP PShape where each child shape is a face / enclosed area - * formed between intersecting lines - * @since 1.1.2 + * Finds polygonal faces from the given shape's linework. + *

+ * This method extracts linework from the supplied PShape (including existing + * polygon edges and standalone line primitives), nodes intersections, and + * polygonizes the resulting segment network. Only closed polygonal faces + * (enclosed areas) are returned. Open edges, dangling line segments + * ("dangles"), and isolated lines that do not form a closed ring are ignored + * and dropped — the result contains faces only. + * + * The returned PShape is a GROUP whose children are PShapes representing each + * detected face. + * + * @param shape a PShape whose linework (edges) will be used to find polygonal + * faces; can include existing polygons or line primitives + * @return a GROUP PShape containing only the polygonal faces discovered from + * the input linework; dangles and non-enclosed edges are not included + * @since 2.2 */ - public static PShape polygonizeLines(List lineSegmentVertices) { - // TODO constructor for LINES PShape - if (lineSegmentVertices.size() % 2 != 0) { - System.err.println("The input to polygonizeLines() contained an odd number of vertices. The method expects successive pairs of vertices."); - return new PShape(); - } - - final List segmentStrings = new ArrayList<>(lineSegmentVertices.size() / 2); - for (int i = 0; i < lineSegmentVertices.size(); i += 2) { - final PVector v1 = lineSegmentVertices.get(i); - final PVector v2 = lineSegmentVertices.get(i + 1); - if (!v1.equals(v2)) { - segmentStrings.add(new NodedSegmentString(new Coordinate[] { PGS.coordFromPVector(v1), PGS.coordFromPVector(v2) }, null)); - } - } - - return PGS.polygonizeSegments(segmentStrings, true); + public static PShape polygonize(PShape shape) { + var g = fromPShape(shape); + var segs = SegmentStringUtil.extractNodedSegmentStrings(g); + return PGS.polygonizeSegments(segs, true); } /** diff --git a/src/main/java/micycle/pgs/PGS_Tiling.java b/src/main/java/micycle/pgs/PGS_Tiling.java index ea8e3807..8a0b0908 100644 --- a/src/main/java/micycle/pgs/PGS_Tiling.java +++ b/src/main/java/micycle/pgs/PGS_Tiling.java @@ -21,6 +21,8 @@ import micycle.pgs.commons.PEdge; import micycle.pgs.commons.PenroseTiling; import micycle.pgs.commons.RectangularSubdivision; +import micycle.pgs.commons.SoftCells; +import micycle.pgs.commons.SoftCells.TangentMode; import micycle.pgs.commons.SquareTriangleTiling; import micycle.pgs.commons.TriangleSubdivision; import processing.core.PConstants; @@ -396,16 +398,17 @@ public static PShape hexTiling(final double width, final double height, final do public static PShape islamicTiling(final double width, final double height, final double w, final double h) { // adapted from https://openprocessing.org/sketch/320133 final double[] vector = { -w, 0, w, -h, w, 0, -w, h }; - final ArrayList segments = new ArrayList<>(); + var s = PGS.prepareLinesPShape(null, null, null); for (int x = 0; x < width; x += w * 2) { for (int y = 0; y < height; y += h * 2) { for (int i = 0; i <= vector.length; i++) { - segments.add(new PVector((float) (vector[i % vector.length] + x + w), (float) (vector[(i + 6) % vector.length] + y + h))); - segments.add(new PVector((float) (vector[(i + 1) % vector.length] + x + w), (float) (vector[(i + 1 + 6) % vector.length] + y + h))); + s.vertex((float) (vector[i % vector.length] + x + w), (float) (vector[(i + 6) % vector.length] + y + h)); + s.vertex((float) (vector[(i + 1) % vector.length] + x + w), (float) (vector[(i + 1 + 6) % vector.length] + y + h)); } } } - return PGS_Processing.polygonizeLines(segments); + s.endShape(); + return 
PGS_Processing.polygonize(s); } /** @@ -516,6 +519,58 @@ public static PShape annularBricks(final int nRings, final double cx, final doub return PGS_Conversion.flatten(bricks); } + /** + * Generates a softened (curved) version of a tiling using the SoftCells + * edge-bending algorithm. + * + *

+ * The input mesh's straight edges are softened into smooth, Bezier-like curves
+ * according to the supplied parameters. The resulting shape preserves the mesh
+ * topology (combinatorial adjacency) while altering the geometry to produce the
+ * characteristic "soft cell" appearance.
+ * <p>
+ * The implementation samples random directions once per vertex (not per edge)
+ * when a stochastic tangent mode is selected. The {@code seed} only influences
+ * the following TangentMode values:
+ * <ul>
+ * <li>{@code RANDOM} - a random unit direction (one angle) is chosen once per
+ * vertex;</li>
+ * <li>{@code RANDOM_DIAGONAL} - one of the two diagonal directions (diag1 or
+ * diag2) is chosen once per vertex;</li>
+ * <li>{@code RANDOM_60DEG} - one of three 60° directions is selected once per
+ * vertex.</li>
+ * </ul>
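+ * <p>
+ * A minimal usage sketch, assuming {@code tiling} is a GROUP shape of polygonal
+ * tiles (for example, the output of another method in this class):
+ *
+ * <pre>{@code
+ * // ratio 0.5 gives moderate curvature; the seed fixes the RANDOM tangent choices
+ * PShape soft = PGS_Tiling.softCells(tiling, 0.5, TangentMode.RANDOM, 1337);
+ * shape(soft);
+ * }</pre>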
+ * + * @param mesh the input PShape representing the base tiling to be softened; + * must not be null. The input is not modified — a new PShape is + * returned. + * @param ratio a floating-point control for the amount of softening/edge + * bending. Typical usage treats this as a normalised factor + * (commonly in the [0,1] range) where smaller values produce + * subtler curvature and larger values produce stronger softening + * (values much larger than may lead to face self-intersection). + * @param mode the TangentMode that selects how half-tangents / edge directions + * are chosen and aligned during the edge-bending process; see + * TangentMode for available modes and behaviour. + * @param seed random seed used to initialise the RNG. The seed only affects + * the stochastic tangent modes listed above; using the same seed + * with the same input mesh and parameters yields deterministic, + * repeatable output. + * @return a new PShape containing the softened tessellation (curved/soft cells) + * corresponding to the input mesh and parameters. + * @since 2.2 + */ + public static PShape softCells(PShape mesh, double ratio, TangentMode mode, long seed) { + SoftCells sc = new SoftCells(seed); + mesh = PGS_Optimisation.hilbertSortFaces(mesh); + var cells = sc.generate(mesh, mode, (float) ratio); + cells = PGS_Conversion.setAllStrokeColor(cells, Colors.PINK, 2); + return cells; + } + /** * Generates a hexagon shape. * diff --git a/src/main/java/micycle/pgs/commons/EdgePrunedFaces.java b/src/main/java/micycle/pgs/commons/EdgePrunedFaces.java new file mode 100644 index 00000000..f087882d --- /dev/null +++ b/src/main/java/micycle/pgs/commons/EdgePrunedFaces.java @@ -0,0 +1,591 @@ +package micycle.pgs.commons; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import org.jgrapht.alg.interfaces.VertexColoringAlgorithm.Coloring; +import org.jgrapht.alg.spanning.GreedyMultiplicativeSpanner; +import org.jgrapht.graph.AbstractBaseGraph; +import org.jgrapht.graph.DefaultEdge; +import org.jgrapht.graph.SimpleGraph; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.tinfour.common.IConstraint; +import org.tinfour.common.IIncrementalTin; +import org.tinfour.common.IQuadEdge; +import org.tinfour.common.Vertex; +import org.tinfour.utils.TriangleCollector; +import org.tinspin.index.PointMap; +import org.tinspin.index.kdtree.KDTree; + +import micycle.pgs.PGS_Conversion; +import micycle.pgs.PGS_Triangulation; +import processing.core.PShape; + +/** + * Utilities for extracting polygonal faces from a TIN by pruning TIN edges + * according to a rule, grouping triangles across the pruned edges, and tracing + * group boundaries. + * + *

+ * <p>
+ * <b>Core idea</b>
+ * <ul>
+ * <li>Start with a Delaunay TIN ({@link IIncrementalTin}).</li>
+ * <li>Drop a subset of base edges according to a rule (Urquhart, Gabriel, RNG,
+ * k-spanner, etc.).</li>
+ * <li>Merge triangles across any dropped edge (DSU / union-find).</li>
+ * <li>For each merged component, collect only its boundary half-edges (those
+ * not dropped and not shared by two triangles of the component), then sequence
+ * them to form a ring and emit a polygon.</li>
+ * </ul>
+ * <p>
+ * <b>Benefits</b>
+ * <ul>
+ * <li>Linear-time over triangles/edges for the TIN portion; avoids
+ * polygonization / noding / unions.</li>
+ * <li>Robust topology, since edges are taken directly from the TIN; boundaries
+ * are sequenced only.</li>
+ * <li>Pluggable rules via a simple {@code DropRule} interface.</li>
+ * <li>Consistent perimeter handling: when {@code preservePerimeter} is true,
+ * constraint/hull borders are never dropped.</li>
+ * </ul>
+ * <p>
+ * <b>Implemented rules</b>
+ * <ul>
+ * <li>Urquhart: drop the longest base edge of each triangle.</li>
+ * <li>Gabriel: drop uv if the midpoint of uv has a nearest vertex that is
+ * neither u nor v.</li>
+ * <li>Relative Neighborhood (RNG): drop uv if there exists w with max(d(u,w),
+ * d(v,w)) < d(u,v).</li>
+ * <li>k-Spanner: keep only edges chosen by a greedy multiplicative spanner on
+ * the TIN graph ({@code SimpleGraph<Vertex, IQuadEdge>}), drop the rest.</li>
+ * <li>Edge-collapse quadrangulation: 3-color the three edges of each triangle
+ * (via graph coloring) and drop all edges with color ≥ 2 (or equivalently,
+ * keep colors 0–1) to form mostly quadrilateral faces; honors
+ * perimeter/constraint borders when requested.</li>
+ * </ul>
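+ * <p>
+ * Illustrative use (this class backs the corresponding {@code PGS_Meshing}
+ * methods; {@code tin} is assumed to be a populated {@link IIncrementalTin}):
+ *
+ * <pre>{@code
+ * // prune each triangle's longest edge, merge triangles across pruned edges, trace faces
+ * PShape urquhartMesh = EdgePrunedFaces.urquhartFaces(tin, true);
+ * }</pre>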
+ * + * @author Michael Carleton + */ +public class EdgePrunedFaces { + + // thin wrappers around the common pipelines + + public static PShape urquhartFaces(final IIncrementalTin tin, final boolean preservePerimeter) { + return facesSequencedCommon(tin, preservePerimeter, URQUHART_RULE); + } + + public static PShape gabrielFaces(final IIncrementalTin tin, final boolean preservePerimeter) { + return facesSequencedCommon(tin, preservePerimeter, GABRIEL_RULE); + } + + public static PShape relativeNeighborFaces(final IIncrementalTin tin, final boolean preservePerimeter) { + return facesSequencedCommon(tin, preservePerimeter, RELATIVE_NEIGHBOR_RULE); + } + + public static PShape spannerFaces(final IIncrementalTin tin, int k, final boolean preservePerimeter) { + return facesSequencedCommon(tin, preservePerimeter, spannerDropRule(tin, k)); + } + + public static PShape edgeCollapseQuadrangulation(final IIncrementalTin tin, final boolean preservePerimeter) { + return facesSequencedCommon(tin, preservePerimeter, edgeCollapseQuadrangulationDropRule(tin)); + } + + /** + * Common pipeline: collect mesh, mark dropped edges via rule, group via DSU, + * sequence boundaries to polygons. + * + * @param tin + * @param preservePerimeter + * @param rule + * @return + */ + private static PShape facesSequencedCommon(final IIncrementalTin tin, final boolean preservePerimeter, final DropRule rule) { + final GeometryFactory gf = new GeometryFactory(); + final boolean notConstrained = tin.getConstraints().isEmpty(); + + // Collect triangles, adjacency, and unique vertices + final List tris = new ArrayList<>(); + final Map edgeAdj = new IdentityHashMap<>(); + final Set vset = new HashSet<>(); + + TriangleCollector.visitSimpleTriangles(tin, t -> { + final IConstraint c = t.getContainingRegion(); + if (!(notConstrained || (c != null && c.definesConstrainedRegion()))) { + return; + } + + final int tid = tris.size(); + final IQuadEdge ea = t.getEdgeA(); + final IQuadEdge eb = t.getEdgeB(); + final IQuadEdge ec = t.getEdgeC(); + tris.add(new TriRec(ea, eb, ec)); + + addAdj(edgeAdj, ea.getBaseReference(), tid); + addAdj(edgeAdj, eb.getBaseReference(), tid); + addAdj(edgeAdj, ec.getBaseReference(), tid); + + vset.add(t.getVertexA()); + vset.add(t.getVertexB()); + vset.add(t.getVertexC()); + }); + + final int nTri = tris.size(); + if (nTri == 0) { + return new PShape(); + } + + final MeshCtx ctx = new MeshCtx(tris, edgeAdj, new ArrayList<>(vset)); + + // Mark dropped edges according to the rule + final Set dropped = Collections.newSetFromMap(new IdentityHashMap<>()); + rule.markDropped(ctx, preservePerimeter, dropped); + + // DSU across any dropped base edge + final DSU dsu = new DSU(nTri); + for (IQuadEdge e : dropped) { + final int[] inc = edgeAdj.get(e); + if (inc != null && inc[0] >= 0 && inc[1] >= 0) { + dsu.union(inc[0], inc[1]); + } + } + + // Group triangles + final Map> comp = new HashMap<>(); + for (int t = 0; t < nTri; t++) { + comp.computeIfAbsent(dsu.find(t), k -> new ArrayList<>()).add(t); + } + + // Build faces: collect boundary half-edges, sequence into one ring, emit + // polygon + var faces = comp.values().parallelStream().map(triIds -> { + final List boundary = new ArrayList<>(); + + for (int tid : triIds) { + final TriRec tr = tris.get(tid); + for (int i = 0; i < 3; i++) { + final IQuadEdge he = tr.e[i]; + final IQuadEdge base = he.getBaseReference(); + + // Skip dropped inner seams + if (dropped.contains(base)) { + continue; + } + + // If neighbor is in same group, edge is interior, skip + final int[] 
inc = edgeAdj.get(base); + final int nb = (inc == null) ? -1 : (inc[0] == tid ? inc[1] : inc[0]); + if (nb >= 0 && dsu.find(nb) == dsu.find(tid)) { + continue; + } + + // Boundary half-edge, interior on the left + boundary.add(he); + } + } + + if (boundary.isEmpty()) { + return null; + } + + final Coordinate[] ring = sequenceSingleLoop(boundary); + if (ring == null) { + return null; + } + + return PGS_Conversion.toPShape(gf.createPolygon(ring)); + }); + + return PGS_Conversion.flatten(faces.filter(Objects::nonNull).toList()); + } + + private static Coordinate[] sequenceSingleLoop(List edges) { + if (edges.isEmpty()) { + return null; + } + + final Map out = new IdentityHashMap<>(edges.size() * 2); + for (IQuadEdge e : edges) { + out.put(e.getA(), e); // assumes at most one outgoing per vertex on the boundary + } + + final IQuadEdge start = edges.get(0); + final Vertex startV = start.getA(); + + final List coords = new ArrayList<>(edges.size() + 1); + coords.add(new Coordinate(start.getA().x, start.getA().y)); + + IQuadEdge cur = start; + for (int i = 0; i < edges.size(); i++) { + coords.add(new Coordinate(cur.getB().x, cur.getB().y)); + if (cur.getB() == startV) { + break; // closed the loop + } + final IQuadEdge next = out.get(cur.getB()); + if (next == null) { + return null; // unexpected: dangling + } + cur = next; + } + + // Ensure closed + if (!coords.get(0).equals2D(coords.get(coords.size() - 1))) { + coords.add(new Coordinate(coords.get(0))); + } + if (coords.size() < 4) { + return null; + } + + return coords.toArray(new Coordinate[0]); + } + + private static final DropRule URQUHART_RULE = (ctx, preservePerimeter, dropped) -> { + // Urquhart rule: drop the longest base edge of each triangle + for (TriRec tr : ctx.tris) { + IQuadEdge e0 = tr.e[0].getBaseReference(); + IQuadEdge e1 = tr.e[1].getBaseReference(); + IQuadEdge e2 = tr.e[2].getBaseReference(); + + IQuadEdge max = e0; + double m2 = len2(e0); + double l1 = len2(e1); + if (l1 > m2) { + m2 = l1; + max = e1; + } + double l2 = len2(e2); + if (l2 > m2) { + max = e2; + } + + if (!preservePerimeter || !max.isConstraintRegionBorder()) { + dropped.add(max); + } + } + }; + + private static final DropRule GABRIEL_RULE = (ctx, preservePerimeter, dropped) -> { + // Gabriel rule: drop edges whose midpoint’s nearest vertex is neither endpoint + // Build KD-tree of all vertices + final PointMap tree = KDTree.create(2); + for (Vertex v : ctx.vertices) { + tree.insert(new double[] { v.x, v.y }, v); + } + + for (IQuadEdge base : ctx.edgeAdj.keySet()) { + if (preservePerimeter && base.isConstraintRegionBorder()) { + continue; + } + + final double mx = 0.5 * (base.getA().x + base.getB().x); + final double my = 0.5 * (base.getA().y + base.getB().y); + final Vertex nn = tree.query1nn(new double[] { mx, my }).value(); + + if (nn != base.getA() && nn != base.getB()) { + dropped.add(base); + } + } + }; + + private static final DropRule RELATIVE_NEIGHBOR_RULE = (ctx, preservePerimeter, dropped) -> { + // Relative Neighborhood Graph: drop uv if exists w in N(u) or N(v) with + // max(d(u,w), d(v,w)) < d(u,v) + // Build 1-ring vertex adjacency from base edges + final IdentityHashMap> nbrs = new IdentityHashMap<>(); + for (IQuadEdge base : ctx.edgeAdj.keySet()) { + final Vertex a = base.getA(); + final Vertex b = base.getB(); + nbrs.computeIfAbsent(a, k -> Collections.newSetFromMap(new IdentityHashMap<>())).add(b); + nbrs.computeIfAbsent(b, k -> Collections.newSetFromMap(new IdentityHashMap<>())).add(a); + } + + // Test each base edge against RNG 
condition, using squared distances + for (IQuadEdge base : ctx.edgeAdj.keySet()) { + if (preservePerimeter && base.isConstraintRegionBorder()) { + continue; + } + + final Vertex a = base.getA(); + final Vertex b = base.getB(); + final double l2 = dist2(a, b); + + boolean drop = false; + + // Check neighbors of a + final Set Na = nbrs.getOrDefault(a, Collections.emptySet()); + for (Vertex w : Na) { + if (w == b) { + continue; + } + if (Math.max(dist2(w, a), dist2(w, b)) < l2) { + drop = true; + break; + } + } + // If not dropped, check neighbors of b + if (!drop) { + final Set Nb = nbrs.getOrDefault(b, Collections.emptySet()); + for (Vertex w : Nb) { + if (w == a) { + continue; + } + if (Math.max(dist2(w, a), dist2(w, b)) < l2) { + drop = true; + break; + } + } + } + + if (drop) { + dropped.add(base); + } + } + }; + + private static DropRule spannerDropRule(final IIncrementalTin tin, final int kParam) { + final int k = Math.max(2, kParam); + return (ctx, preservePerimeter, dropped) -> { + final SimpleGraph g = PGS_Triangulation.toTinfourGraph(tin); + if (g.edgeSet().isEmpty()) { + return; + } + + final GreedyMultiplicativeSpanner sp = new GreedyMultiplicativeSpanner<>(g, k); + + // Build identity set of kept base refs from the spanner result + final Set kept = Collections.newSetFromMap(new IdentityHashMap<>()); + for (IQuadEdge e : sp.getSpanner()) { + kept.add(e.getBaseReference()); + } + + // Drop every base edge not in the spanner (unless perimeter preserved) + for (IQuadEdge base : ctx.edgeAdj.keySet()) { + if (preservePerimeter && base.isConstraintRegionBorder()) { + continue; + } + if (!kept.contains(base)) { + dropped.add(base); + } + } + }; + } + + private static DropRule edgeCollapseQuadrangulationDropRule(final IIncrementalTin tin) { + /*- + * From 'Fast unstructured quadrilateral mesh generation'. + * A better coloring approach is given in 'Face coloring in unstructured CFD codes'. + * + * First partition the edges of the triangular mesh into three groups such that + * no triangle has two edges of the same color (find groups by reducing to a + * graph-coloring). + * Then obtain an all-quadrilateral mesh by removing all edges of *one* + * particular color. 
+ */ + final boolean unconstrained = tin.getConstraints().isEmpty(); + + // Collect unconstrained perimeter edges (base refs) if applicable + final Set perimeterBaseRefs = Collections.newSetFromMap(new IdentityHashMap<>()); + if (unconstrained) { + for (IQuadEdge e : tin.getPerimeter()) { + perimeterBaseRefs.add(e.getBaseReference()); + } + } + + return (ctx, preservePerimeter, dropped) -> { + // Build a graph where each vertex is a base edge; connect the 3 edges of each + // triangle + final AbstractBaseGraph g = new SimpleGraph<>(DefaultEdge.class); + for (TriRec tr : ctx.tris) { + final IQuadEdge a = tr.e[0].getBaseReference(); + final IQuadEdge b = tr.e[1].getBaseReference(); + final IQuadEdge c = tr.e[2].getBaseReference(); + + g.addVertex(a); + g.addVertex(b); + g.addVertex(c); + + g.addEdge(a, b); + g.addEdge(a, c); + g.addEdge(b, c); + } + if (g.vertexSet().isEmpty()) { + return; + } + + // 3-color the "edge graph" so no triangle has two edges of the same color + final Coloring coloring = new RLFColoring<>(g, 1337).getColoring(); + + // Mark all edges of the chosen color as dropped, honoring perimeter + // preservation + for (Map.Entry e : coloring.getColors().entrySet()) { + final IQuadEdge base = e.getKey(); + final int color = e.getValue(); + + /* + * NOTE 4-colorings are possible, so some triangles may have two or three edges + * with color >= 2, and yield faces larger than quads once edges are dropped. + */ + if (color < 2) { + continue; + } + + if (preservePerimeter) { + // Preserve constraint borders, and unconstrained outer perimeter + if (base.isConstraintRegionBorder() || perimeterBaseRefs.contains(base)) { + continue; + } + } + + dropped.add(base); + } + }; + } + + private static void addAdj(Map adj, IQuadEdge base, int tid) { + int[] a = adj.get(base); + if (a == null) { + a = new int[] { -1, -1 }; + adj.put(base, a); + } + if (a[0] < 0) { + a[0] = tid; + } else { + a[1] = tid; + } + } + + private static double dist2(Vertex p, Vertex q) { + return p.getDistanceSq(q); + } + + private static double len2(IQuadEdge e) { + final double dx = e.getA().x - e.getB().x; + final double dy = e.getA().y - e.getB().y; + return dx * dx + dy * dy; + } + + /** + * Strategy for selecting TIN base edges to drop prior to face extraction. + *

+ * <p>
+ * The pipeline will union triangles across every dropped base edge, then trace
+ * the remaining edges to form polygon boundaries. A rule inspects the immutable
+ * mesh context and adds base edges (getBaseReference()) to the provided set.
+ * <p>
+ * Typical usage idea:
+ * 
+ * <pre>
+	 * DropRule rule = (ctx, preservePerimeter, dropped) -> {
+	 * 	for (IQuadEdge base : ctx.edgeAdj.keySet()) {
+	 * 		if (preservePerimeter && isPerimeter(ctx, base))
+	 * 			continue; // edge has <2 incident triangles
+	 * 		if (shouldDropAccordingToRule(base, ctx))
+	 * 			dropped.add(base);
+	 * 	}
+	 * };
+	 * 
+ * </pre>
+ * 
+ * <p>
+ * Notes:
+ * <ul>
+ * <li>Add base edges only (not half-edges); identity semantics apply.</li>
+ * <li>If preservePerimeter is true, do not drop perimeter/constraint borders.</li>
+ * <li>Do not mutate ctx; only populate the dropped set.</li>
+ * <li>Deterministic selection is recommended.</li>
+ * </ul>
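+ * <p>
+ * The isPerimeter(ctx, base) helper in the example above is illustrative and
+ * not part of this interface. One possible sketch, assuming the two-slot
+ * adjacency arrays built by addAdj(...) (the second slot stays -1 when only
+ * one triangle is incident to the edge):
+ * 
+ * <pre>
+ * static boolean isPerimeter(MeshCtx ctx, IQuadEdge base) {
+ * 	int[] adj = ctx.edgeAdj.get(base); // identity lookup on the base reference
+ * 	return adj == null || adj[1] < 0; // fewer than 2 incident triangles
+ * }
+ * </pre>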
+ */ + private interface DropRule { + /** + * Marks base edges to be removed. + * + * @param ctx immutable mesh data (triangles, base-edge adjacency, + * vertices) + * @param preservePerimeter when true, perimeter/constraint borders must be kept + * @param dropped identity set to populate with base edges to drop + */ + void markDropped(MeshCtx ctx, boolean preservePerimeter, Set dropped); + } + + // Collected mesh context from the TIN + private static final class MeshCtx { + final List tris; + final Map edgeAdj; // base edge -> up to 2 incident triangle ids + final List vertices; // unique vertices seen in accepted triangles + + MeshCtx(List tris, Map edgeAdj, List vertices) { + this.tris = tris; + this.edgeAdj = edgeAdj; + this.vertices = vertices; + } + } + + // Triangle record with oriented half-edges (triangle on left) + private static record TriRec(IQuadEdge[] e) { + public TriRec { + if (e == null) + throw new NullPointerException("e"); + if (e.length != 3) + throw new IllegalArgumentException("array must be length 3"); + e = e.clone(); // defensive copy before assignment + } + + public TriRec(IQuadEdge a, IQuadEdge b, IQuadEdge c) { + this(new IQuadEdge[] { a, b, c }); + } + + // override accessor to return a copy so callers can't mutate internal array + @Override + public IQuadEdge[] e() { + return e.clone(); + } + } + + // Simple DSU + private static final class DSU { + final int[] p, r; + + DSU(int n) { + p = new int[n]; + r = new int[n]; + for (int i = 0; i < n; i++) { + p[i] = i; + } + } + + int find(int x) { + return p[x] == x ? x : (p[x] = find(p[x])); + } + + void union(int a, int b) { + a = find(a); + b = find(b); + if (a == b) { + return; + } + if (r[a] < r[b]) { + int t = a; + a = b; + b = t; + } + p[b] = a; + if (r[a] == r[b]) { + r[a]++; + } + } + } + +} diff --git a/src/main/java/micycle/pgs/commons/GeneticColoring.java b/src/main/java/micycle/pgs/commons/GeneticColoring.java index a58cea5e..47e01a3b 100644 --- a/src/main/java/micycle/pgs/commons/GeneticColoring.java +++ b/src/main/java/micycle/pgs/commons/GeneticColoring.java @@ -1,11 +1,10 @@ package micycle.pgs.commons; import java.util.ArrayList; +import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.SplittableRandom; import org.jgrapht.Graph; @@ -14,56 +13,63 @@ import org.jgrapht.util.CollectionUtil; /** - * Finds a solution to a graph coloring using a genetic algorithm. + * Memetic (GA + local search) solver for the k-Coloring problem on a JGraphT + * graph. *

- * This class implements the technique described in Genetic Algorithm Applied - * to the Graph Coloring Problem by Musa M. Hindi and Roman V. - * Yampolskiy. - *

- * The genetic algorithm process continues until it either finds a solution - * (i.e. 0 conflicts between adjacent vertices) or the algorithm has been run - * for the predefined number of generations. + * The algorithm searches for a proper coloring by minimizing the number of + * conflicting edges (adjacent vertices with the same color). It targets a + * 4-coloring first; if unsuccessful within the generation limit, it repairs the + * best found assignment into a proper coloring using up to 5 colors. + * *

- * The goal of the algorithm is to improve the fitness of the population (a - * coloring) by mating its fittest individuals to produce superior offspring - * that offer a better solution to the problem. This process continues until a - * terminating condition is reached which could be simply that the total number - * of generations has been run or any other parameter like non-improvement of - * fitness over a certain number of generations or that a solution for the - * problem has been found. - * - * @author Soroush Javadi - * @author Refactored for JGraphT by Michael Carleton + * Approach (concise): + *

    + *
+ * <ul>
+ * <li>Representation: int chromosome of length |V|; gene = color in [0, k-1].</li>
+ * <li>Fitness: number of conflicting edges (counted once per edge).</li>
+ * <li>Initialization: strong seeds via DSATUR and degree-ordered greedy, plus
+ * random individuals.</li>
+ * <li>Selection: tournament; elitism preserves the best individuals each
+ * generation.</li>
+ * <li>Crossover: uniform (favoring agreement) and occasional 2-point
+ * crossover.</li>
+ * <li>Mutation: conflict-directed; for conflicted vertices choose a valid color
+ * using a boolean mask (micro-optimized); otherwise minimize conflicts.</li>
+ * <li>Local search: steepest-descent conflict repair applied to each child
+ * (memetic step).</li>
+ * <li>Diversification: partial re-seeding on stagnation to escape local
+ * minima.</li>
+ * <li>Precomputation: integer vertex IDs and neighbor arrays for fast inner
+ * loops.</li>
+ * <li>Termination: success when fitness = 0 or after maxGenerations; fallback
+ * repair guarantees a proper coloring.</li>
+ * </ul>
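+ * <p>
+ * Usage sketch (the graph, vertex type and seed value below are illustrative
+ * placeholders, not part of this class):
+ * 
+ * <pre>
+ * Graph<String, DefaultEdge> graph = ...; // any simple JGraphT graph
+ * Coloring<String> coloring = new GeneticColoring<>(graph, 1337).getColoring();
+ * Map<String, Integer> colorOf = coloring.getColors(); // vertex -> color index (0..k-1)
+ * </pre>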
* - * @param the graph vertex type - * @param the graph edge type + * @param graph vertex type + * @param graph edge type */ public class GeneticColoring implements VertexColoringAlgorithm { private final int vertexCount; private final int maxGenerations; private final int populationSize; - // fitness threshold for choosing a parent selection and mutation algorithm private final int fitnessThreshold; private SplittableRandom rand; private int colorsCount; - final Map colors; + final java.util.Map colors; private final List neighborCache; - final Map vertexIds; - - /** - * Creates with a population size of 50; "the value was chosen after testing a - * number of different population sizes. The value 50 was the least value that - * produced the desired results". - * - * @param graph - */ - public GeneticColoring(Graph graph) { - this(graph, 100, 50, 4); + final java.util.Map vertexIds; + + // Precomputed degrees and degree order + private final int[] degrees; + private final int[] degreeOrder; + + public GeneticColoring(Graph graph, long seed) { + this(graph, 200, 100, 4, seed); // larger defaults to give memetic search room } - public GeneticColoring(Graph graph, int maxGenerations, int populationSize, int fitnessThreshold) { + public GeneticColoring(Graph graph, int maxGenerations, int populationSize, int fitnessThreshold, long seed) { if (graph == null || maxGenerations < 1 || populationSize < 2) { throw new IllegalArgumentException(); } @@ -72,32 +78,45 @@ public GeneticColoring(Graph graph, int maxGenerations, int populationSize this.maxGenerations = maxGenerations; this.populationSize = populationSize; this.fitnessThreshold = fitnessThreshold; - this.rand = new SplittableRandom(); // NOTE unseeded + this.rand = new SplittableRandom(seed); this.colors = CollectionUtil.newHashMapWithExpectedSize(graph.vertexSet().size()); + // Vertex IDs 0..n-1 this.vertexIds = new HashMap<>(); int i = 0; for (V v : graph.vertexSet()) { - vertexIds.put(v, i); + vertexIds.put(v, i++); } - this.neighborCache = new ArrayList<>(); - final NeighborCache neighborCacheJT = new NeighborCache<>(graph); + // Neighbor cache indexed by ID + this.neighborCache = new ArrayList<>(vertexCount); + for (int k = 0; k < vertexCount; k++) + neighborCache.add(null); + final NeighborCache nc = new NeighborCache<>(graph); for (V v : graph.vertexSet()) { - neighborCache.add(neighborCacheJT.neighborsOf(v).stream().map(vertexIds::get) // map vertex objects to Integer ID - .mapToInt(Integer::intValue) // Integer -> int - .toArray()); + int id = vertexIds.get(v); + int[] neigh = nc.neighborsOf(v).stream().map(vertexIds::get).mapToInt(Integer::intValue).toArray(); + neighborCache.set(id, neigh); + } + + // Degrees and degree-descending order for greedy seeding + this.degrees = new int[vertexCount]; + for (int v = 0; v < vertexCount; v++) + degrees[v] = neighborCache.get(v).length; + this.degreeOrder = new int[vertexCount]; + { + Integer[] idx = new Integer[vertexCount]; + for (int v = 0; v < vertexCount; v++) + idx[v] = v; + Arrays.sort(idx, (a, b) -> Integer.compare(degrees[b], degrees[a])); + for (int k = 0; k < vertexCount; k++) + degreeOrder[k] = idx[k]; } } @Override public Coloring getColoring() { - /* - * For PGS, attempt to find a 4-color (optimal) solution within the - * maxGenerations threshold. If a solution is not found, find a 5-color solution - * instead (much more attainable/faster). 
- */ if (!getSolution(4)) { getSolution(5); } @@ -121,191 +140,354 @@ private int[] neighborsOf(int v) { return neighborCache.get(v); } + private int[] greedyColoringByOrder(int[] order) { + int[] chrom = new int[vertexCount]; + Arrays.fill(chrom, -1); + boolean[] used = new boolean[colorsCount]; + int[] candidates = new int[colorsCount]; + for (int idx = 0; idx < vertexCount; idx++) { + int v = order[idx]; + Arrays.fill(used, false); + for (int u : neighborsOf(v)) { + int cu = chrom[u]; + if (cu >= 0) + used[cu] = true; + } + int n = 0; + for (int c = 0; c < colorsCount; c++) + if (!used[c]) + candidates[n++] = c; + chrom[v] = (n > 0) ? candidates[rand.nextInt(n)] : rand.nextInt(colorsCount); + } + return chrom; + } + + // DSATUR seeding (bounded to colorsCount; if blocked, choose least-conflicting + // color) + private int[] dsaturColoring() { + int[] color = new int[vertexCount]; + Arrays.fill(color, -1); + int[] sat = new int[vertexCount]; // saturation degree + int[] usedCounts = new int[vertexCount]; // temporary reuse per vertex + boolean[][] neighborColors = new boolean[vertexCount][colorsCount]; + + for (int colored = 0; colored < vertexCount; colored++) { + int v = -1, bestSat = -1, bestDeg = -1; + for (int u = 0; u < vertexCount; u++) { + if (color[u] != -1) + continue; + int s = sat[u]; + int d = degrees[u]; + if (s > bestSat || (s == bestSat && d > bestDeg)) { + bestSat = s; + bestDeg = d; + v = u; + } + } + Arrays.fill(neighborColors[v], false); + for (int w : neighborsOf(v)) + if (color[w] >= 0) + neighborColors[v][color[w]] = true; + int pick = -1, bestConf = Integer.MAX_VALUE; + for (int c = 0; c < colorsCount; c++) { + int conf = 0; + for (int w : neighborsOf(v)) + if (color[w] == c) + conf++; + if (!neighborColors[v][c]) { + pick = c; + break; + } // conflict-free available + if (conf < bestConf) { + bestConf = conf; + pick = c; + } + } + color[v] = pick; + // update saturation of neighbors + for (int w : neighborsOf(v)) { + if (color[w] == -1) { + if (!neighborColors[w][pick]) { + neighborColors[w][pick] = true; + sat[w]++; + } + } + } + } + return color; + } + + private int[] shuffledOrderFrom(int[] base) { + int[] order = base.clone(); + for (int i = order.length - 1; i > 0; i--) { + int j = rand.nextInt(i + 1); + int t = order[i]; + order[i] = order[j]; + order[j] = t; + } + return order; + } + private class Population { - private List population; // NOTE heap suitable? + private List population; private int generation = 0; + private int bestSoFar = Integer.MAX_VALUE; + private int stagnantGens = 0; + + // memetic parameters + private final int eliteCount = Math.max(2, populationSize / 5); + private final int tournamentK = 3; + private final int localSearchCap = Math.max(200, vertexCount * 10); Population() { population = new ArrayList<>(populationSize); - for (int i = 0; i < populationSize; i++) { - population.add(new Individual()); + + // Strong seeds + population.add(new Individual(dsaturColoring())); + population.add(new Individual(greedyColoringByOrder(degreeOrder))); + + int greedySeeds = Math.max(2, populationSize / 10); + for (int s = 2; s < greedySeeds; s++) { + int[] order = shuffledOrderFrom(degreeOrder); + population.add(new Individual(greedyColoringByOrder(order))); } + + while (population.size() < populationSize) + population.add(new Individual()); + sort(); + bestSoFar = bestFitness(); } - /** - * With each generation the bottom performing half of the population is removed - * and new randomly generated chromosomes are added. 
- */ public void nextGeneration() { - final int halfSize = populationSize / 2; - List children = new ArrayList<>(halfSize); - for (int i = 0; i < halfSize; i++) { - Parents parents = selectParents(); - Individual child = new Individual(parents); + // Elitism: keep top eliteCount + List next = new ArrayList<>(populationSize); + for (int i = 0; i < eliteCount; i++) + next.add(population.get(i)); + + // Fill the rest with children + while (next.size() < populationSize) { + Individual p1 = tournamentSelect(); + Individual p2 = tournamentSelect(); + Individual child = rand.nextBoolean() ? new Individual(new Parents(p1, p2)) : new Individual(twoPointCrossover(p1, p2)); child.mutate(); - children.add(child); - } - for (int i = 0; i < halfSize; i++) { - population.set(populationSize - i - 1, children.get(i)); + child.localSearch(localSearchCap); // memetic step + next.add(child); } + + population = next; sort(); generation++; + + int best = bestFitness(); + if (best < bestSoFar) { + bestSoFar = best; + stagnantGens = 0; + } else { + stagnantGens++; + if (stagnantGens >= 20 && best > 0) { + diversify(); + stagnantGens = 0; + bestSoFar = bestFitness(); + } + } } - /** - * - * @return the best/fittest color assignment - */ - public int[] bestIndividual() { - return population.get(0).chromosome; + private Individual tournamentSelect() { + Individual best = null; + for (int i = 0; i < tournamentK; i++) { + Individual cand = population.get(rand.nextInt(populationSize)); + if (best == null || cand.fitness < best.fitness) + best = cand; + } + return best; } - public int bestFitness() { - return population.get(0).fitness; + private Parents twoPointCrossover(Individual a, Individual b) { + int i = rand.nextInt(vertexCount); + int j = rand.nextInt(vertexCount); + if (i > j) { + int t = i; + i = j; + j = t; + } + int[] child = new int[vertexCount]; + System.arraycopy(a.chromosome, 0, child, 0, i); + System.arraycopy(b.chromosome, i, child, i, j - i); + System.arraycopy(a.chromosome, j, child, j, vertexCount - j); + Individual wrapped = new Individual(child); + return new Parents(wrapped, wrapped); // reuse Individual(Parents) signature via a wrapper } - public int generation() { - return generation; + private void diversify() { + // re-seed the weakest third + int start = (int) (populationSize * 0.67); + for (int i = start; i < populationSize; i++) { + if (rand.nextDouble() < 0.5) { + population.set(i, new Individual()); + } else { + if (rand.nextBoolean()) + population.set(i, new Individual(dsaturColoring())); + else + population.set(i, new Individual(greedyColoringByOrder(shuffledOrderFrom(degreeOrder)))); + } + } + sort(); } - /** - * Choosing a parent selection and mutation method depends on the state of the - * population and how close it is to finding a solution. - *

- * If the best fitness is greater than {@code fitnessThreshold} then - * parentSelection1 and mutation1 are used. Otherwise, parentSelection2 and - * mutation2 are used. This alteration is the result of experimenting with the - * different data sets. It was observed that when the best fitness score is low - * (i.e. approaching an optimum) the usage of parent selection 2 (which copies - * the best chromosome as the new child) along with mutation2 (which randomly - * selects a color for the violating vertex) results in a solution more often - * and more quickly than using the other two respective methods. - */ - private Parents selectParents() { - return bestFitness() > fitnessThreshold ? selectParents1() : selectParents2(); + public int[] bestIndividual() { + return population.get(0).chromosome; } - private Parents selectParents1() { - Individual tempParent1, tempParent2, parent1, parent2; - tempParent1 = population.get(rand.nextInt(populationSize)); - do { - tempParent2 = population.get(rand.nextInt(populationSize)); - } while (tempParent1 == tempParent2); - parent1 = (tempParent1.fitness > tempParent2.fitness ? tempParent2 : tempParent1); - do { - tempParent1 = population.get(rand.nextInt(populationSize)); - do { - tempParent2 = population.get(rand.nextInt(populationSize)); - } while (tempParent1 == tempParent2); - parent2 = (tempParent1.fitness > tempParent2.fitness ? tempParent2 : tempParent1); - } while (parent1 == parent2); - return new Parents(parent1, parent2); + public int bestFitness() { + return population.get(0).fitness; } - private Parents selectParents2() { - return new Parents(population.get(0), population.get(1)); + public int generation() { + return generation; } private void sort() { population.sort(Comparator.comparingInt(m -> m.fitness)); } - /** - * A candidate graph coloring. - */ private class Individual { - /** - * each element of chromosome represents a color of a vertex - */ private int[] chromosome; - /** - * fitness is defined as the number of 'bad' edges, i.e., edges connecting two - * vertices with the same color - */ private int fitness; - /** - * Instantiate a random individual. - */ Individual() { chromosome = new int[vertexCount]; - for (int i = 0; i < vertexCount; i++) { + for (int i = 0; i < vertexCount; i++) chromosome[i] = rand.nextInt(colorsCount); - } scoreFitness(); } - // crossover - Individual(Parents parents) { - chromosome = new int[vertexCount]; - final int crosspoint = rand.nextInt(vertexCount); - System.arraycopy(parents.parent1.chromosome, 0, chromosome, 0, crosspoint); - System.arraycopy(parents.parent2.chromosome, crosspoint, chromosome, crosspoint, vertexCount - crosspoint); + Individual(int[] chromosome) { + this.chromosome = chromosome.clone(); scoreFitness(); } - public void mutate() { - if (bestFitness() > fitnessThreshold) { - mutate1(); - } else { - mutate2(); + // Uniform crossover favoring agreement + Individual(Parents parents) { + chromosome = new int[vertexCount]; + final int[] p1 = parents.parent1.chromosome; + final int[] p2 = parents.parent2.chromosome; + for (int i = 0; i < vertexCount; i++) { + int c1 = p1[i], c2 = p2[i]; + chromosome[i] = (c1 == c2) ? c1 : (rand.nextBoolean() ? 
c1 : c2); } + scoreFitness(); } - private void mutate1() { - for (int v = 0; v < vertexCount; v++) { - for (int w : neighborsOf(v)) { - if (chromosome[v] == chromosome[w]) { - HashSet validColors = new HashSet<>(); - for (int c = 0; c < colorsCount; c++) { - validColors.add(c); - } - for (int u : neighborsOf(v)) { - validColors.remove(chromosome[u]); + public void mutate() { + // Conflict-directed mutation + boolean[] used = new boolean[colorsCount]; + int[] candidates = new int[colorsCount]; + + // early phase vs late phase adapts intensity + int passes = (bestFitness() > fitnessThreshold) ? 2 : 1; + for (int pass = 0; pass < passes; pass++) { + for (int v = 0; v < vertexCount; v++) { + int cv = chromosome[v]; + boolean conflict = false; + for (int w : neighborsOf(v)) { + if (cv == chromosome[w]) { + conflict = true; + break; } - if (!validColors.isEmpty()) { - chromosome[v] = (int) validColors.toArray()[rand.nextInt(validColors.size())]; + } + if (!conflict) + continue; + + // try best color for v + Arrays.fill(used, false); + for (int u : neighborsOf(v)) + used[chromosome[u]] = true; + int n = 0; + for (int c = 0; c < colorsCount; c++) + if (!used[c]) + candidates[n++] = c; + if (n > 0) { + chromosome[v] = candidates[rand.nextInt(n)]; + } else { + // pick color minimizing conflicts + int bestC = cv, bestConf = Integer.MAX_VALUE; + for (int c = 0; c < colorsCount; c++) { + int conf = 0; + for (int u : neighborsOf(v)) + if (chromosome[u] == c) + conf++; + if (conf < bestConf) { + bestConf = conf; + bestC = c; + } } - break; + chromosome[v] = bestC; } } } scoreFitness(); } - private void mutate2() { - for (int v = 0; v < vertexCount; v++) { - for (int w : neighborsOf(v)) { - if (chromosome[v] == chromosome[w]) { - chromosome[v] = rand.nextInt(colorsCount); - break; + // Local search: steepest-descent conflict repair with cap + void localSearch(int cap) { + int moves = 0; + boolean improved = true; + int[] colorCounts = new int[colorsCount]; + + while (improved && moves < cap && fitness > 0) { + improved = false; + + // Build a random permutation of vertices to reduce bias + int[] order = shuffledOrderFrom(degreeOrder); + for (int idx = 0; idx < vertexCount && moves < cap; idx++) { + int v = order[idx]; + int cv = chromosome[v]; + + int currentConf = 0; + for (int w : neighborsOf(v)) + if (chromosome[w] == cv) + currentConf++; + if (currentConf == 0) + continue; + + Arrays.fill(colorCounts, 0); + for (int w : neighborsOf(v)) + colorCounts[chromosome[w]]++; + + int bestC = cv; + int bestScore = currentConf; + for (int c = 0; c < colorsCount; c++) { + int score = colorCounts[c]; + if (score < bestScore || (score == bestScore && c < bestC)) { + bestScore = score; + bestC = c; + } + } + if (bestC != cv) { + chromosome[v] = bestC; + moves++; + // Recompute fitness lazily after a batch; here recompute fully occasionally + if ((moves & 15) == 0) + scoreFitness(); + improved = true; } } + // finalize fitness recompute + scoreFitness(); } - scoreFitness(); } - /** - * A bad edge is defined as an edge connecting two vertices that have the same - * color. The number of bad edges is the fitness score for the chromosome - * (higher number is worse fitness). - */ private void scoreFitness() { int f = 0; for (int v = 0; v < vertexCount; v++) { - for (int w : neighborsOf(v)) { - if (chromosome[v] == chromosome[w]) { + int cv = chromosome[v]; + for (int w : neighborsOf(v)) + if (w > v && cv == chromosome[w]) f++; - } - } } - /* - * Divide by 2 to account for double counting. 
Doesn't really matter if we're - * simply using fitness score to sort individuals. - */ - fitness = f / 2; + fitness = f; } } diff --git a/src/main/java/micycle/pgs/commons/MinimumBoundingTriangle.java b/src/main/java/micycle/pgs/commons/MinimumBoundingTriangle.java deleted file mode 100644 index 73407197..00000000 --- a/src/main/java/micycle/pgs/commons/MinimumBoundingTriangle.java +++ /dev/null @@ -1,395 +0,0 @@ -package micycle.pgs.commons; - -import static java.lang.Math.floorMod; - -import java.util.Arrays; - -import org.locationtech.jts.geom.Coordinate; -import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.geom.GeometryFactory; -import org.locationtech.jts.geom.Polygon; -import org.locationtech.jts.geom.PrecisionModel; - -/** - * Computes the Minimum Bounding Triangle (MBT) for the points in a Geometry. - * The MBT is the smallest triangle which covers all the input points (this is - * also known as the Smallest Enclosing Triangle). - *

- * The algorithm for finding minimum area enclosing triangles is based on an - * elegant geometric characterisation initially introduced in Klee & Laskowski. - * The algorithm iterates over each edge of the convex polygon setting side C of - * the enclosing triangle to be flush with this edge. A side S is said to - * be flush with edge E if S⊇E. The authors of O’Rourke et al. - * prove that for each fixed flush side C a local minimum enclosing triangle - * exists. Moreover, the authors have shown that: - *

    - *
  • The midpoints of the enclosing triangle’s sides must touch the - * polygon.
  • - *
  • There exists a local minimum enclosing triangle with at least two sides - * flush with edges of the polygon. The third side of the triangle can be either - * flush with an edge or tangent to the polygon.
  • - *
- * Thus, for each flush side C the algorithm will find the second flush side and - * set the third side either flush/tangent to the polygon. - *

- * O'Rourke provides a θ(n) algorithm for finding the minimal enclosing triangle - * of a 2D convex polygon with n vertices. However, the overall - * complexity for the concave computation is O(nlog(n)) because a convex hull - * must first be computed for the input geometry. - * - * @author Python implementation - * by Charlie Marsh - * @author Java port by Michael Carleton - * - */ -public class MinimumBoundingTriangle { - - private static final GeometryFactory GEOM_FACTORY = new GeometryFactory(new PrecisionModel(PrecisionModel.FLOATING_SINGLE)); - private static final double EPSILON = 0.01; // account for floating-point errors / within-distance thresold - - private final int n; - private final Coordinate[] points; - - /** - * Creates a new instance of a Maximum Inscribed Triangle computation. - * - * @param shape an areal geometry - */ - public MinimumBoundingTriangle(Geometry shape) { - shape = shape.convexHull(); - points = Arrays.copyOfRange(shape.getCoordinates(), 0, shape.getCoordinates().length - 1); // treat coordinates as unclosed - n = points.length; - } - - /** - * Gets a geometry which represents the Minimium Bounding Triangle. - * - * @return a triangle Geometry representing the Minimum Bounding Triangle - */ - public Geometry getTriangle() { - int a = 1; - int b = 2; - - double minArea = Double.MAX_VALUE; - Polygon optimalTriangle = null; - - for (int i = 0; i < n; i++) { - TriangleForIndex tForIndex = new TriangleForIndex(i, a, b); - Polygon triangle = tForIndex.triangle; - a = tForIndex.aOut; - b = tForIndex.bOut; - if (triangle != null) { - double area = triangle.getArea(); - /* - * If the found enclosing triangle is valid/minimal and its area is less than - * the area of the optimal enclosing triangle found so far, then the optimal - * enclosing triangle is updated. - */ - if (optimalTriangle == null || area < minArea) { - optimalTriangle = triangle; - minArea = area; - } - } - } - - return optimalTriangle; - } - - /** - * Computes the minimal triangle with edge C flush to vertex c. - * - * Abstracted into class (during Java port) to better structure the many - * methods. - */ - private class TriangleForIndex { - - // return values - final int aOut, bOut; - final Polygon triangle; - - private final Side sideC; - private Side sideA, sideB; - - TriangleForIndex(int c, int a, int b) { - a = Math.max(a, c + 1) % n; - b = Math.max(b, c + 2) % n; - sideC = side(c); - - /* - * A necessary condition for finding a minimum enclosing triangle is that b is - * on the right chain and a on the left. The first step inside the loop is - * therefore to move the index b on the right chain using the onLeftChain() - * subalgorithm. - */ - while (onLeftChain(b)) { - b = (b + 1) % n; - } - - /* - * The next condition which must be fulfilled is that a and b must be critical, - * or high. The incrementLowHigh() subalgorithm advances a and b until this - * condition is fulfilled. - */ - while (dist(b, sideC) > dist(a, sideC)) { // Increment a if low, b if high - int[] ab = incrementLowHigh(a, b); - a = ab[0]; - b = ab[1]; - } - - /* - * Next, b will be advanced until [gamma(a) b] is tangent to the convex polygon - * via the tangency() subalgorithm. 
- */ - while (tangency(a, b)) { // Search for b tangency - b = (b + 1) % n; - } - - Coordinate gammaB = gamma(points[b], side(a), sideC); - if (low(b, gammaB) || dist(b, sideC) < dist((a - 1) % n, sideC)) { - sideB = side(b); - sideA = side(a); - sideB = new Side(sideC.intersection(sideB), sideA.intersection(sideB)); - - if (dist(sideB.midpoint(), sideC) < dist(floorMod(a - 1, n), sideC)) { - Coordinate gammaA = gamma(points[floorMod(a - 1, n)], sideB, sideC); - sideA = new Side(gammaA, points[floorMod(a - 1, n)]); - } - } else { - gammaB = gamma(points[b], side(a), sideC); - sideB = new Side(gammaB, points[b]); - sideA = new Side(gammaB, points[floorMod(a - 1, n)]); - } - - // Calculate final intersections - final Coordinate vertexA = sideC.intersection(sideB); - final Coordinate vertexB = sideC.intersection(sideA); - final Coordinate vertexC = sideA.intersection(sideB); - - // Check if triangle is valid local minimum - if (!isValidTriangle(vertexA, vertexB, vertexC, a, b, c)) { - triangle = null; - } else { - triangle = GEOM_FACTORY.createPolygon(new Coordinate[] { vertexA, vertexB, vertexC, vertexA }); - } - - aOut = a; - bOut = b; - } - - /** - * Computes the distance from the point (specified by its index) to the side. - */ - private double dist(int point, Side side) { - return side.distance(points[floorMod(point, points.length)]); - } - - /** - * Computes the distance from the point to the side. - */ - private double dist(Coordinate point, Side side) { - return side.distance(point); - } - - /** - * Calculate the point on the side 'on' that is twice as far from 'base' as - * 'point'. More formally, point γ(p) (Gamma) is the point on the line [a, a−1] - * such that h(γ(p))=2 × h(p), where h(p) is the distance of p from line - * determined by side C. 
- */ - private Coordinate gamma(Coordinate point, Side on, Side base) { - Coordinate intersection = on.intersection(base); - if (intersection != null) { - double dist = 2 * dist(point, base); - // Calculate differential change in distance - if (on.vertical) { - double ddist = dist(new Coordinate(intersection.x, intersection.y + 1), base); - Coordinate guess = new Coordinate(intersection.x, intersection.y + dist / ddist); - if (ccw(base.p1, base.p2, guess) != ccw(base.p1, base.p2, point)) { - guess = new Coordinate(intersection.x, intersection.y - dist / ddist); - } - return guess; - } else { - double ddist = dist(on.atX(intersection.x + 1), base); - Coordinate guess = on.atX(intersection.x + dist / ddist); - if (ccw(base.p1, base.p2, guess) != ccw(base.p1, base.p2, point)) { - guess = on.atX(intersection.x - dist / ddist); - } - return guess; - } - } - return intersection; - } - - private boolean onLeftChain(int b) { - return dist((b + 1) % n, sideC) >= dist(b, sideC); - } - - /** - * @return [a, b] tuple - */ - private int[] incrementLowHigh(int a, int b) { - Coordinate gammaA = gamma(points[a], side(a), sideC); - - if (high(b, gammaA)) { - b = (b + 1) % n; - } else { - a = (a + 1) % n; - } - return new int[] { a, b }; - } - - private boolean tangency(int a, int b) { - Coordinate gammaB = gamma(points[b], side(a), sideC); - return dist(b, sideC) >= dist((a - 1) % n, sideC) && high(b, gammaB); - } - - private boolean high(int b, Coordinate gammaB) { - // Test if two adjacent vertices are on same side of line (implies tangency) - if (ccw(gammaB, points[b], points[floorMod(b - 1, n)]) == ccw(gammaB, points[b], points[(b + 1) % n])) { - return false; - } - - // Test if Gamma and B are on same side of line from adjacent vertices - if (ccw(points[floorMod(b - 1, n)], points[(b + 1) % n], gammaB) == ccw(points[floorMod(b - 1, n)], points[(b + 1) % n], - points[b])) { - return dist(gammaB, sideC) > dist(b, sideC); - } else { - return false; - } - } - - private boolean low(int b, Coordinate gammaB) { - // Test if two adjacent vertices are on same side of line (implies tangency) - if (ccw(gammaB, points[b], points[floorMod(b - 1, n)]) == ccw(gammaB, points[b], points[(b + 1) % n])) { - return false; - } - - // Test if Gamma and B are on same side of line from adjacent vertices - if (ccw(points[floorMod(b - 1, n)], points[(b + 1) % n], gammaB) == ccw(points[floorMod(b - 1, n)], points[(b + 1) % n], - points[b])) { - return false; - } else { - return dist(gammaB, sideC) > dist(b, sideC); - } - } - - /** - * Tests whether the chain formed by A, B, and C is counter-clockwise. - */ - private boolean ccw(Coordinate a, Coordinate b, Coordinate c) { - return (b.x - a.x) * (c.y - a.y) > (b.y - a.y) * (c.x - a.x); - } - - /** - * Checks that a triangle composed of the given vertices is a valid local - * minimum (entails that all midpoints of the triangle should touch the - * polygon). 
- * - * @param vertexA Vertex A of the enclosing triangle - * @param vertexB Vertex B of the enclosing triangle - * @param vertexC Vertex C of the enclosing triangle - */ - private boolean isValidTriangle(Coordinate vertexA, Coordinate vertexB, Coordinate vertexC, int a, int b, int c) { - if (vertexA == null || vertexB == null || vertexC == null) { - return false; - } - Coordinate midpointA = midpoint(vertexC, vertexB); - Coordinate midpointB = midpoint(vertexA, vertexC); - Coordinate midpointC = midpoint(vertexA, vertexB); - return (validateMidpoint(midpointA, a) && validateMidpoint(midpointB, b) && validateMidpoint(midpointC, c)); - } - - /** - * Checks that a midpoint touches the polygon on the appropriate side. - */ - private boolean validateMidpoint(Coordinate midpoint, int index) { - Side s = side(index); - - if (s.vertical) { - if (midpoint.x != s.p1.x) { - return false; - } - double maxY = Math.max(s.p1.y, s.p2.y) + EPSILON; - double minY = Math.min(s.p1.y, s.p2.y) - EPSILON; - return (midpoint.y <= maxY && midpoint.y >= minY); - } else { - double maxX = Math.max(s.p1.x, s.p2.x) + EPSILON; - double minX = Math.min(s.p1.x, s.p2.x) - EPSILON; - // Must touch polygon - if (!(midpoint.x <= maxX && midpoint.x >= minX)) { - return false; - } - - return (s.atX(midpoint.x).distance(midpoint) < EPSILON); - } - } - - private Side side(final int i) { - return new Side(points[floorMod(i - 1, n)], points[i]); - } - - private Coordinate midpoint(Coordinate a, Coordinate b) { - return new Coordinate((a.x + b.x) / 2, (a.y + b.y) / 2); - } - } - - /** - * Helper class representing a side, or edge. - */ - private class Side { - - final Coordinate p1, p2; - final double slope, intercept; - final boolean vertical; - - Side(Coordinate p1, Coordinate p2) { - this.p1 = p1; - this.p2 = p2; - slope = (p2.y - p1.y) / (p2.x - p1.x); - intercept = p1.y - slope * p1.x; - vertical = p1.x == p2.x; - } - - private double sqrDistance(Coordinate p) { - double numerator = (p2.x - p1.x) * (p1.y - p.y) - (p1.x - p.x) * (p2.y - p1.y); - numerator *= numerator; - double denominator = (p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y); - return numerator / denominator; - } - - /** - * @return Returns the distance of p from the line - */ - private double distance(Coordinate p) { - return Math.sqrt(sqrDistance(p)); - } - - private Coordinate atX(double x) { - if (vertical) { - return p1; // NOTE return p1 (though incorrect) rather than null - } - return new Coordinate(x, slope * x + intercept); - } - - private Coordinate intersection(Side that) { - if (that.slope == slope) { - return null; - } - - if (vertical) { - return that.atX(p1.x); - } else if (that.vertical) { - return atX(that.p1.x); - } - - double x = (intercept - that.intercept) / (that.slope - slope); - return atX(x); - } - - private Coordinate midpoint() { - return new Coordinate((p1.x + p2.x) / 2, (p1.y + p2.y) / 2); - } - } - -} diff --git a/src/main/java/micycle/pgs/commons/SoftCells.java b/src/main/java/micycle/pgs/commons/SoftCells.java new file mode 100644 index 00000000..98948311 --- /dev/null +++ b/src/main/java/micycle/pgs/commons/SoftCells.java @@ -0,0 +1,537 @@ +package micycle.pgs.commons; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; + +import processing.core.PConstants; +import processing.core.PShape; +import processing.core.PVector; + +/** + * Softer tesselations. 
+ * + * A Java implementation of the algorithm described in the paper "A Generative + * Approach to Smooth Tessellations" (PNAS Nexus, 2024). + * + *

+ * <p>
+ * This class generates and renders smooth, curved tessellations using Bezier
+ * curves to soften the edges of a base mesh. The algorithm supports various
+ * tangent modes to control the direction and curvature of the edges, enabling a
+ * wide range of artistic and geometric effects.
+ * <p>
+ * The algorithm is highly customizable, supporting multiple tangent modes and
+ * input meshes. It can be used for generative art, architectural design, and
+ * scientific visualization.
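+ * <p>
+ * Illustrative usage sketch (the tiling variable stands in for any conforming
+ * GROUP mesh of face polygons):
+ * 
+ * <pre>
+ * SoftCells soft = new SoftCells(42); // seeded, so randomised tangent modes repeat
+ * PShape curved = soft.generate(tiling, TangentMode.ADAPTIVE, 0.8f);
+ * shape(curved);
+ * </pre>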
+ * + * @see Original + * Paper + * @author Michael Carleton + * @author CLAUDIO ESPERANÇA + */ +public class SoftCells { + + // https://openprocessing.org/sketch/2419830 + + /** + * Tangents are computed for each edge based on the selected tangent mode. These + * tangents control the curvature of the Bezier curves used to render the edges. + * + * Each tangent is scaled to half the length of the shortest edge incident to + * the source vertex. Unless otherwise noted, the direction is flipped per edge + * so the tangent generally points in the same hemisphere as the edge. + */ + public enum TangentMode { + /** Fixed +X direction for all edges of the vertex; no per-edge flip. */ + HORIZONTAL, + /** Fixed +Y direction for all edges of the vertex; no per-edge flip. */ + VERTICAL, + /** + * 45° diagonal with slope +1 (vector (1,1)); flipped per edge to align via dot + * sign. + */ + DIAGONAL, + /** + * 45° diagonal with slope −1 (vector (1,−1)); flipped per edge to align via dot + * sign. + */ + DIAGONAL2, + /** + * Randomly picks one of the two 45° diagonals per vertex; flipped per edge to + * align. + */ + RANDOM_DIAGONAL, + /** + * Picks one of three directions 60° apart per vertex; flipped per edge to + * align. + */ + RANDOM_60DEG, + /** + * Uses the sum/average of incident edge directions; flipped per edge to align. + */ + ADAPTIVE, + /** Uniformly random direction per vertex; flipped per edge to align. */ + RANDOM, + /** + * Horizontal base; sign determined by row parity and the edge’s vertical sign. + * Edges that are nearly horizontal (abs(u.y) < 1e-3) align to their own + * direction. + */ + EVEN_ODD, + /** + * Alternates between the two 45° diagonals by (col+row) parity; flipped per + * edge to align. + */ + ALT_DIAGONAL, + /** + * Cycles through three 60° directions by (col+row)%3; flipped per edge to + * align. + */ + ALT_60DEG + } + + private List points = new ArrayList<>(); + private List faceMap = new ArrayList<>(); + private Map edgeTangentMap = new HashMap<>(); + private List> vertexEdgeMap = new ArrayList<>(); + + private float avgSize = 1f; + private float minX = 0, minY = 0, maxX = 0, maxY = 0; + + // deterministic RNG + private Random rng = new Random(0L); + + public SoftCells() { + this(0L); + } + + public SoftCells(long seed) { + setSeed(seed); + } + + /** + * Convenience method: load mesh, compute tangents, build shape in one call. + */ + public PShape generate(PShape mesh, TangentMode mode, float ratio) { + loadMeshFromPShape(mesh); + computeTangents(mode); + return buildSoftFacesShape(ratio); + } + + public void setSeed(long seed) { + this.rng = new Random(seed); + } + + /** + * Loads mesh data from a PShape and populates face/vertex data structures. + * Assumes the input mesh is conforming, meaning vertices are shared exactly + * between faces. 
+ * + * @param mesh PShape containing the mesh (each child is a face polygon) + */ + public void loadMeshFromPShape(final PShape mesh) { + // 1) reset + resetDataStructures(); + + // 2) collect unique vertices by value + final Set unique = new LinkedHashSet<>(); + for (int f = 0; f < mesh.getChildCount(); f++) { + final PShape face = mesh.getChild(f); + for (int v = 0; v < face.getVertexCount(); v++) { + unique.add(face.getVertex(v)); + } + } + + // 3) build and sort points (x, then y, then z) + points = new ArrayList<>(unique); + points.sort(Comparator.comparing(p -> p.x).thenComparing(p -> p.y)); + + // 4) init adjacency and index map (value-based) + vertexEdgeMap = new ArrayList<>(points.size()); + for (int i = 0; i < points.size(); i++) { + vertexEdgeMap.add(new ArrayList<>()); + } + final Map index = new HashMap<>(points.size() * 2); + for (int i = 0; i < points.size(); i++) { + index.put(points.get(i), i); + } + + // 5) faces + adjacency using sorted indices + for (int f = 0; f < mesh.getChildCount(); f++) { + final PShape face = mesh.getChild(f); + final int n = face.getVertexCount(); + final int[] faceVerts = new int[n]; + + for (int v = 0; v < n; v++) { + final PVector p = face.getVertex(v); + final Integer idx = index.get(p); + if (idx == null) { + throw new IllegalStateException("Vertex not in index map: " + p); + } + faceVerts[v] = idx; + } + faceMap.add(faceVerts); + + for (int i = 0; i < n; i++) { + final int a = faceVerts[i]; + final int b = faceVerts[(i + 1) % n]; + addUnique(vertexEdgeMap.get(a), b); + addUnique(vertexEdgeMap.get(b), a); + } + } + + // 6) finalise + sortVertexNeighborsByAngle(); + computeBoundsAndAvgSize(); + } + + private void resetDataStructures() { + points = new ArrayList<>(); + faceMap = new ArrayList<>(); + vertexEdgeMap = new ArrayList<>(); + edgeTangentMap = new HashMap<>(); + avgSize = 1f; + minX = minY = maxX = maxY = 0; + } + + private void addUnique(final List list, final int value) { + if (!list.contains(value)) { + list.add(value); + } + } + + private void sortVertexNeighborsByAngle() { + for (int i = 0; i < points.size(); i++) { + final PVector center = points.get(i); + final List neighbors = vertexEdgeMap.get(i); + + neighbors.sort((a, b) -> { + final PVector va = PVector.sub(points.get(a), center); + final PVector vb = PVector.sub(points.get(b), center); + return Float.compare(va.heading(), vb.heading()); + }); + } + } + + private void computeBoundsAndAvgSize() { + if (points.isEmpty()) { + avgSize = 1f; + return; + } + + minX = maxX = points.get(0).x; + minY = maxY = points.get(0).y; + + for (PVector p : points) { + if (p.x < minX) { + minX = p.x; + } + if (p.y < minY) { + minY = p.y; + } + if (p.x > maxX) { + maxX = p.x; + } + if (p.y > maxY) { + maxY = p.y; + } + } + + // average unique edge length + double sum = 0.0; + int cnt = 0; + for (int i = 0; i < points.size(); i++) { + final PVector pi = points.get(i); + for (int j : vertexEdgeMap.get(i)) { + if (j > i) { // unique edge (i,j) with i 0) ? 
(float) (sum / cnt) : Math.max(1f, (maxX - minX + maxY - minY) * 0.01f); + if (avgSize <= 0) { + avgSize = 1f; + } + } + + public void computeTangents(final TangentMode mode) { + edgeTangentMap = new HashMap<>(); + for (int i = 0; i < points.size(); i++) { + final TangentEstimatorFunc tangentFunc = getTangentEstimator(mode, i); + final List neighbors = vertexEdgeMap.get(i); + if (neighbors != null) { + for (int k = 0; k < neighbors.size(); k++) { + final int j = neighbors.get(k); + final PVector tangent = tangentFunc.estimateTangent(k, i, j); + edgeTangentMap.put(getKey(i, j), tangent); + } + } + } + } + + private int getKey(final int src, final int dst) { + return points.size() * src + dst; + } + + /** + * Creates a tangent estimator function for a given vertex based on the + * specified tangent mode. + * + *

+ * This method is a core part of the algorithm for generating smooth, curved + * tessellations. It determines the direction and magnitude of tangents for each + * edge connected to a vertex, which are later used to compute Bezier curves for + * rendering the tessellation. The tangent estimator function returned by this + * method is specific to a single vertex and is applied to all its neighboring + * edges. + *

+ * + *

+ * <p>
+ * The tangent mode defines how tangents are calculated:
+ * <ul>
+ * <li>Fixed Directions: Modes like {@link TangentMode#HORIZONTAL} and
+ * {@link TangentMode#DIAGONAL} use predefined directions (e.g., horizontal,
+ * vertical, or diagonal) to compute tangents.</li>
+ * <li>Adaptive Directions: Modes like {@link TangentMode#ADAPTIVE}
+ * calculate tangents based on the average direction of neighboring edges,
+ * ensuring smooth transitions.</li>
+ * <li>Randomized Directions: Modes like {@link TangentMode#RANDOM} and
+ * {@link TangentMode#RANDOM_60DEG} generate random directions for tangents,
+ * either uniformly or constrained to specific angles (e.g., 60°).</li>
+ * </ul>
+ *

+ * + *

+ * The tangent estimator function returned by this method takes the index of a + * neighboring edge and computes a tangent vector for that edge. The tangent + * vector is scaled to half the length of the shortest neighboring edge (to + * ensure smooth curvature) and is oriented based on the edge's direction and + * the chosen tangent mode. + *
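+ * <p>
+ * As an illustrative reduction (not a separate API), the HORIZONTAL case boils
+ * down to returning a copy of one fixed, pre-scaled vector, where halfMin is
+ * half the length of the vertex's shortest incident edge:
+ * 
+ * <pre>
+ * TangentEstimatorFunc horizontal = (k, src, dst) -> new PVector(1, 0).mult(halfMin);
+ * </pre>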

+ * + * @param mode The tangent mode to use for computing tangents. This determines + * how tangent directions are calculated. + * @param i The index of the vertex for which the tangent estimator is being + * created. + * @return A {@link TangentEstimatorFunc} that computes tangent vectors for + * edges connected to the vertex. + */ + private TangentEstimatorFunc getTangentEstimator(final TangentMode mode, final int i) { + final List neighbors = vertexEdgeMap.get(i); + final List neighborVectors = new ArrayList<>(); + final PVector srcPoint = points.get(i); + + // build the list of edge-vectors emanating from vertex i + for (final int nbr : neighbors) { + neighborVectors.add(PVector.sub(points.get(nbr), srcPoint)); + } + + // compute average length, minimum length and sum-direction + float avgLen = 0; + float minLen = Float.MAX_VALUE; + final PVector avgDir = new PVector(); + if (!neighborVectors.isEmpty()) { + for (final PVector v : neighborVectors) { + final float m = v.mag(); + avgLen += m; + minLen = Math.min(minLen, m); + avgDir.add(v); + } + avgLen /= neighborVectors.size(); + } else { + minLen = 0; + } + + final float halfMin = minLen * 0.5f; + // make any base directions we’ll need (already scaled to halfMin) + final PVector horiz = new PVector(1, 0).mult(halfMin); + final PVector vert = new PVector(0, 1).mult(halfMin); + final PVector diag1 = new PVector(1, 1).normalize().mult(halfMin); + final PVector diag2 = new PVector(1, -1).normalize().mult(halfMin); + + // PRE‐CHOOSING random direction once per vertex: + // note: this is never mutated later + final float TWO_PI = (float) (Math.PI * 2.0); + final PVector randDir = PVector.fromAngle(rng.nextFloat() * TWO_PI).mult(halfMin); + final PVector randDiagonal = (rng.nextFloat() < 0.5f ? diag1 : diag2); + + // adaptive means sum-direction + final PVector adaptiveDir = avgDir.copy().setMag(halfMin); + + switch (mode) { + case HORIZONTAL : + return (k, src, dst) -> { + // WORK ON A COPY: + return horiz.copy(); + }; + + case VERTICAL : + return (k, src, dst) -> { + return vert.copy(); + }; + + case DIAGONAL : + return (k, src, dst) -> { + final PVector dir = diag1.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case DIAGONAL2 : + return (k, src, dst) -> { + final PVector dir = diag2.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case RANDOM_DIAGONAL : + // randDiagonal was chosen once above, do NOT re‐roll per edge + return (k, src, dst) -> { + final PVector dir = randDiagonal.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case RANDOM_60DEG : + // pick one of three 60° directions once per vertex + final PVector[] tris = { new PVector(0, 1), new PVector(0, 1).rotate((float) Math.toRadians(60)), + new PVector(0, 1).rotate((float) Math.toRadians(-60)) }; + final PVector triDir = tris[rng.nextInt(tris.length)].normalize().mult(halfMin); + return (k, src, dst) -> { + final PVector dir = triDir.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case ADAPTIVE : + return (k, src, dst) -> { + final PVector dir = adaptiveDir.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case RANDOM : + return (k, src, dst) -> { + final PVector dir = randDir.copy(); + if (neighborVectors.get(k).dot(dir) < 0) { + dir.mult(-1); + } + return dir; + }; + + case EVEN_ODD : + return (k, src, dst) -> { + final int col = (int) 
((srcPoint.x - minX) / avgSize); + final int row = (int) ((srcPoint.y - minY) / avgSize); + final PVector base = horiz.copy(); + final PVector u = neighborVectors.get(k); + // if perfectly horizontal edge, just align + if (Math.abs(u.y) < 1e-3) { + if (u.dot(base) < 0) { + base.mult(-1); + } + return base; + } + if (row % 2 == 0) { + if (u.y < 0) { + base.mult(-1); + } + } else { + if (u.y > 0) { + base.mult(-1); + } + } + return base; + }; + + case ALT_DIAGONAL : + return (k, src, dst) -> { + final int col = (int) ((srcPoint.x - minX) / avgSize); + final int row = (int) ((srcPoint.y - minY) / avgSize); + final PVector pick = ((col + row) % 2 == 0 ? diag1 : diag2).copy(); + if (neighborVectors.get(k).dot(pick) < 0) { + pick.mult(-1); + } + return pick; + }; + + case ALT_60DEG : + return (k, src, dst) -> { + final int col = (int) ((srcPoint.x - minX) / avgSize); + final int row = (int) ((srcPoint.y - minY) / avgSize); + final PVector[] altTris = { new PVector(0, 1), new PVector(0, 1).rotate((float) Math.toRadians(60)), + new PVector(0, 1).rotate((float) Math.toRadians(-60)) }; + final PVector pick = altTris[(Math.abs(col + row)) % 3].normalize().mult(halfMin); + if (neighborVectors.get(k).dot(pick) < 0) { + pick.mult(-1); + } + return pick; + }; + + default : + return (k, src, dst) -> new PVector(0, 0); + } + } + + interface TangentEstimatorFunc { + PVector estimateTangent(int k, int srcIndex, int dstIndex); + } + + public PShape buildSoftFacesShape(final float ratio) { + final PShape grp = new PShape(PConstants.GROUP); + + for (final int[] vtx : faceMap) { + final PShape poly = new PShape(PShape.PATH); + poly.setStroke(0); + poly.setStroke(true); + poly.setStrokeWeight(2); + poly.setFill(255); + poly.setFill(true); + + poly.beginShape(); + int i = vtx[vtx.length - 1]; + PVector p1 = points.get(i); + poly.vertex(p1.x, p1.y); + + for (final int j : vtx) { + final PVector p2 = points.get(j); + final PVector t1 = edgeTangentMap.get(points.size() * i + j); + final PVector t2 = edgeTangentMap.get(points.size() * j + i); + + if (t1 != null && t2 != null) { + poly.bezierVertex(p1.x + t1.x * ratio, p1.y + t1.y * ratio, p2.x + t2.x * ratio, p2.y + t2.y * ratio, p2.x, p2.y); + } else { + poly.vertex(p2.x, p2.y); + } + i = j; + p1 = p2; + } + + poly.endShape(PConstants.CLOSE); + grp.addChild(poly); + } + + return grp; + } +} \ No newline at end of file diff --git a/src/main/java/micycle/pgs/commons/VisibilityPolygon.java b/src/main/java/micycle/pgs/commons/VisibilityPolygon.java index 00c89b9d..6dd288f4 100644 --- a/src/main/java/micycle/pgs/commons/VisibilityPolygon.java +++ b/src/main/java/micycle/pgs/commons/VisibilityPolygon.java @@ -5,6 +5,8 @@ import java.util.Collections; import java.util.List; +import org.locationtech.jts.algorithm.Angle; +import org.locationtech.jts.algorithm.LineIntersector; import org.locationtech.jts.algorithm.RobustLineIntersector; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; @@ -23,11 +25,14 @@ import net.jafama.FastMath; /** + * * This class computes an isovist, which is the volume of space visible from a * specific point in space, based on a given set of original segments. + * *

* The code in this class is adapted from Byron Knoll's javascript library, * available at https://github.com/byronknoll/visibility-polygon-js + * *

*

    *
 * <li>Sort all vertices based on their angle to the observer.
@@ -57,30 +62,39 @@
* * @author Nicolas Fortin of Ifsttar UMRAE - * @author Small changes by Michael Carleton + * @author Changes by Michael Carleton */ public class VisibilityPolygon { - // from h2gis-utilities - - private static final double M_2PI = Math.PI * 2.; private static final Coordinate NAN_COORDINATE = new Coordinate(Coordinate.NULL_ORDINATE, Coordinate.NULL_ORDINATE); // maintain the list of limits sorted by angle private double maxDistance; private List originalSegments = new ArrayList<>(); private double epsilon = 1e-8; // epsilon to help avoid degeneracies - private int numPoints = 96; /** - * @param maxDistance maximum distance (from the view point) constraint for the - * visibility polygon + * Creates a visibility-polygon (isovist) builder with a user-defined range + * limit. + *

+ * When getIsovist(..., true) is called, maxDistance defines the half-size of + * the axis-aligned bounding square centered on the view point (the square’s + * side length is 2*maxDistance). This bounds the result in otherwise unbounded + * scenes. + * + * @param maxDistance positive half-size of the optional bounding square, in the + * same units as the input coordinates */ public VisibilityPolygon(double maxDistance) { this.maxDistance = maxDistance; } + /** + * Creates a visibility-polygon (isovist) builder with a default range limit. + *

+ * Equivalent to new VisibilityPolygon(2500). + */ public VisibilityPolygon() { - this(2000); + this(2500); } /** @@ -90,7 +104,7 @@ public VisibilityPolygon() { * * @param viewPoints the collection of view points from which the isovist is * computed. - * @param addEnvelope a boolean flag indicating whether to include a circle + * @param addEnvelope a boolean flag indicating whether to include a square * bounding box in the resulting geometry. * @return a polygonal geometry representing the isovist. The geometry returned * may be a single polygon or a multipolygon comprising multiple @@ -109,17 +123,20 @@ public Geometry getIsovist(Collection viewPoints, boolean addEnvelop /** * Computes an isovist, the area of the input visible from a given point in + * * space. - * + * * @param viewPoint View coordinate - * @param addEnvelope If true add circle bounding box. This function does not + * + * @param addEnvelope If true add square bounding box. This function does not + * * work properly if the view point is not enclosed by * segments * @return visibility polygon */ public Polygon getIsovist(Coordinate viewPoint, boolean addEnvelope) { - // Add bounding circle - List bounded = new ArrayList<>(originalSegments.size() + numPoints); + // Add bounding square + List bounded = new ArrayList<>(originalSegments.size() + 4); // Compute envelope Envelope env = new Envelope(); @@ -130,27 +147,24 @@ public Polygon getIsovist(Coordinate viewPoint, boolean addEnvelope) { if (addEnvelope) { // Add bounding geom in envelope env.expandToInclude(new Coordinate(viewPoint.x - maxDistance, viewPoint.y - maxDistance)); - env.expandToInclude(new Coordinate(viewPoint.x + maxDistance, viewPoint.y + viewPoint.x)); + env.expandToInclude(new Coordinate(viewPoint.x + maxDistance, viewPoint.y + maxDistance)); GeometricShapeFactory geometricShapeFactory = new GeometricShapeFactory(); geometricShapeFactory.setCentre(new Coordinate(viewPoint.x - env.getMinX(), viewPoint.y - env.getMinY())); geometricShapeFactory.setWidth(maxDistance * 2); geometricShapeFactory.setHeight(maxDistance * 2); - geometricShapeFactory.setNumPoints(numPoints); - addPolygon(bounded, geometricShapeFactory.createEllipse()); + addPolygon(bounded, geometricShapeFactory.createRectangle()); for (SegmentString segment : originalSegments) { final Coordinate a = segment.getCoordinate(0); final Coordinate b = segment.getCoordinate(1); - addSegment(bounded, new Coordinate(a.x - env.getMinX(), a.y - env.getMinY()), - new Coordinate(b.x - env.getMinX(), b.y - env.getMinY())); + addSegment(bounded, new Coordinate(a.x - env.getMinX(), a.y - env.getMinY()), new Coordinate(b.x - env.getMinX(), b.y - env.getMinY())); } - // Intersection with bounding circle + // Intersection with bounding square bounded = fixSegments(bounded); } else { for (SegmentString segment : originalSegments) { final Coordinate a = segment.getCoordinate(0); final Coordinate b = segment.getCoordinate(1); - addSegment(bounded, new Coordinate(a.x - env.getMinX(), a.y - env.getMinY()), - new Coordinate(b.x - env.getMinX(), b.y - env.getMinY())); + addSegment(bounded, new Coordinate(a.x - env.getMinX(), a.y - env.getMinY()), new Coordinate(b.x - env.getMinX(), b.y - env.getMinY())); } } @@ -250,7 +264,7 @@ public void fixSegments() { */ private static List fixSegments(List segments) { MCIndexNoder mCIndexNoder = new MCIndexNoder(); - RobustLineIntersector robustLineIntersector = new RobustLineIntersector(); + LineIntersector robustLineIntersector = new RobustLineIntersector(); 
mCIndexNoder.setSegmentIntersector(new IntersectionAdder(robustLineIntersector)); mCIndexNoder.computeNodes(segments); Collection nodedSubstring = mCIndexNoder.getNodedSubstrings(); @@ -261,14 +275,6 @@ private static List fixSegments(List segments) { return ret; } - /** - * @param numPoints Number of points of the bounding circle polygon. Default = - * 96. - */ - public void setNumPoints(int numPoints) { - this.numPoints = numPoints; - } - private static double angle(Coordinate a, Coordinate b) { return FastMath.atan2(b.y - a.y, b.x - a.x); } @@ -301,14 +307,7 @@ private static int getParent(int index) { private double angle2(Coordinate a, Coordinate b, Coordinate c) { double a1 = angle(a, b); double a2 = angle(b, c); - double a3 = a1 - a2; - if (a3 < 0) { - a3 += M_2PI; - } - if (a3 > M_2PI) { - a3 -= M_2PI; - } - return a3; + return Angle.normalizePositive(a1 - a2); } private boolean lessThan(int index1, int index2, Coordinate position, List segments, Coordinate destination) { @@ -335,15 +334,14 @@ private boolean lessThan(int index1, int index2, Coordinate position, List heap, Coordinate position, List segments, Coordinate destination, - List map) { + private void remove(int index, List heap, Coordinate position, List segments, Coordinate destination, List map) { map.set(heap.get(index), -1); if (index == heap.size() - 1) { heap.remove(heap.size() - 1); @@ -391,8 +389,7 @@ private void remove(int index, List heap, Coordinate position, List heap, Coordinate position, List segments, Coordinate destination, - List map) { + private void insert(int index, List heap, Coordinate position, List segments, Coordinate destination, List map) { Coordinate inter = intersectLines(segments.get(index), position, destination); if (NAN_COORDINATE.equals2D(inter, epsilon)) { return; @@ -423,57 +420,54 @@ public void setEpsilon(double epsilon) { } /** + * * Explode geometry and add occlusion segments in isovist - * + * * @param geometry Geometry collection, LineString or Polygon instance */ public void addGeometry(Geometry geometry) { if (geometry instanceof LineString) { - addLineString(originalSegments, (LineString) geometry); + addLineString((LineString) geometry); } else if (geometry instanceof Polygon) { - addPolygon(originalSegments, (Polygon) geometry); + addPolygon((Polygon) geometry); } else if (geometry instanceof GeometryCollection) { - addGeometry(originalSegments, (GeometryCollection) geometry); + addGeometry((GeometryCollection) geometry); } } - private static void addGeometry(List segments, GeometryCollection geometry) { + private void addGeometry(GeometryCollection geometry) { int geoCount = geometry.getNumGeometries(); for (int n = 0; n < geoCount; n++) { Geometry simpleGeom = geometry.getGeometryN(n); if (simpleGeom instanceof LineString) { - addLineString(segments, (LineString) simpleGeom); + addLineString((LineString) simpleGeom); } else if (simpleGeom instanceof Polygon) { - addPolygon(segments, (Polygon) simpleGeom); + addPolygon((Polygon) simpleGeom); } else if (simpleGeom instanceof GeometryCollection) { - addGeometry(segments, (GeometryCollection) simpleGeom); + addGeometry((GeometryCollection) simpleGeom); } } } - private static void addPolygon(List segments, Polygon poly) { - addLineString(segments, poly.getExteriorRing()); + private void addPolygon(Polygon poly) { + addLineString(poly.getExteriorRing()); final int ringCount = poly.getNumInteriorRing(); // Keep interior ring if the viewpoint is inside the polygon for (int nr = 0; nr < ringCount; nr++) { - addLineString(segments, 
poly.getInteriorRingN(nr)); + addLineString(poly.getInteriorRingN(nr)); } } public void addLineString(LineString lineString) { - addLineString(originalSegments, lineString); - } - - private static void addLineString(List segments, LineString lineString) { int nPoint = lineString.getNumPoints(); for (int idPoint = 0; idPoint < nPoint - 1; idPoint++) { - addSegment(segments, lineString.getCoordinateN(idPoint), lineString.getCoordinateN(idPoint + 1)); + addSegment(lineString.getCoordinateN(idPoint), lineString.getCoordinateN(idPoint + 1)); } } /** * Add an occlusion segment to the isovist. - * + * * @param p0 segment origin * @param p1 segment destination */ @@ -481,7 +475,23 @@ public void addSegment(Coordinate p0, Coordinate p1) { if (p0.distance(p1) < epsilon) { return; } - addSegment(originalSegments, p0, p1); + originalSegments.add(new NodedSegmentString(new Coordinate[] { p0, p1 }, originalSegments.size() + 1)); + } + + private static void addPolygon(List segments, Polygon poly) { + addLineString(segments, poly.getExteriorRing()); + final int ringCount = poly.getNumInteriorRing(); + // Keep interior ring if the viewpoint is inside the polygon + for (int nr = 0; nr < ringCount; nr++) { + addLineString(segments, poly.getInteriorRingN(nr)); + } + } + + private static void addLineString(List segments, LineString lineString) { + int nPoint = lineString.getNumPoints(); + for (int idPoint = 0; idPoint < nPoint - 1; idPoint++) { + addSegment(segments, lineString.getCoordinateN(idPoint), lineString.getCoordinateN(idPoint + 1)); + } } private static void addSegment(List segments, Coordinate p0, Coordinate p1) { @@ -489,6 +499,7 @@ private static void addSegment(List segments, Coordinate p0, Coor } /** + * * Defines segment vertices. */ private static final class Vertex implements Comparable {