From 5778426a1c61b71e7326ff42ab0d9242759d647a Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 06:25:50 +0100 Subject: [PATCH 01/41] add lax morphisms --- src/lax/hypergraph.rs | 226 +++++++++++++++++++++++++++++++++++ src/strict/hypergraph/mod.rs | 1 + tests/lax/hypergraph.rs | 59 ++++++++- 3 files changed, 285 insertions(+), 1 deletion(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 08f1a06..3e2c2e0 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -1,4 +1,5 @@ use crate::array::vec::{VecArray, VecKind}; +use crate::category::Arrow; use crate::finite_function::*; use core::fmt::Debug; @@ -68,6 +69,231 @@ pub struct Hypergraph { pub quotient: (Vec, Vec), } +/// A lax morphism of hypergraphs that preserves structure up to the target quotient. +pub struct LaxHypergraphArrow { + /// Source hypergraph + pub source: Hypergraph, + + /// Target hypergraph + pub target: Hypergraph, + + /// Map on nodes + pub w: FiniteFunction, + + /// Map on edges + pub x: FiniteFunction, +} + +fn class_labels( + nodes: &[O], + q: &FiniteFunction, +) -> Result, (NodeId, NodeId)> { + let mut classes: Vec> = vec![None; q.target()]; + for (i, label) in nodes.iter().enumerate() { + let class = q.table[i]; + match &classes[class] { + None => classes[class] = Some((label.clone(), NodeId(i))), + Some((existing, existing_id)) => { + if existing != label { + return Err((NodeId(i), *existing_id)); + } + } + } + } + Ok(classes + .into_iter() + .map(|entry| entry.expect("each class has at least one node").0) + .collect()) +} + +impl LaxHypergraphArrow { + pub fn new( + source: Hypergraph, + target: Hypergraph, + w: FiniteFunction, + x: FiniteFunction, + ) -> Self { + LaxHypergraphArrow { + source, + target, + w, + x, + } + } + + /// Convert this lax morphism to a strict one by quotienting source/target. 
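+ ///
+ /// A minimal sketch of a round-trip to a strict morphism (labels and types are
+ /// assumptions drawn from `test_lax_hypergraph_arrow_to_strict_identity` below):
+ ///
+ /// ```ignore
+ /// use open_hypergraphs::array::vec::{VecArray, VecKind};
+ /// use open_hypergraphs::finite_function::FiniteFunction;
+ /// use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, NodeId};
+ ///
+ /// let mut source = Hypergraph::<usize, usize>::empty();
+ /// source.nodes = vec![1, 2];
+ /// source.edges = vec![10];
+ /// source.adjacency = vec![Hyperedge { sources: vec![NodeId(0)], targets: vec![NodeId(1)] }];
+ /// let target = source.clone();
+ ///
+ /// // Identity maps on nodes and edges give a strict morphism with the same tables.
+ /// let w = FiniteFunction::<VecKind>::new(VecArray(vec![0, 1]), 2).unwrap();
+ /// let x = FiniteFunction::<VecKind>::new(VecArray(vec![0]), 1).unwrap();
+ /// let strict = LaxHypergraphArrow::new(source, target, w, x).to_strict();
+ /// assert_eq!(strict.w.table, VecArray(vec![0, 1]));
+ /// ```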
+ /// + /// This panics if the morphism is invalid + pub fn to_strict(self) -> crate::strict::hypergraph::HypergraphArrow { + if self.w.source() != self.source.nodes.len() { + panic!( + "node map source size mismatch: got {}, expected {}", + self.w.source(), + self.source.nodes.len() + ); + } + if self.w.target() != self.target.nodes.len() { + panic!( + "node map target size mismatch: got {}, expected {}", + self.w.target(), + self.target.nodes.len() + ); + } + if self.x.source() != self.source.edges.len() { + panic!( + "edge map source size mismatch: got {}, expected {}", + self.x.source(), + self.source.edges.len() + ); + } + if self.x.target() != self.target.edges.len() { + panic!( + "edge map target size mismatch: got {}, expected {}", + self.x.target(), + self.target.edges.len() + ); + } + + let q_src = self.source.coequalizer(); + let q_tgt = self.target.coequalizer(); + + let _source_labels = class_labels(&self.source.nodes, &q_src).unwrap_or_else(|(a, b)| { + panic!( + "source quotient has conflicting node labels between {:?} and {:?}", + a, b + ) + }); + let target_labels = class_labels(&self.target.nodes, &q_tgt).unwrap_or_else(|(a, b)| { + panic!( + "target quotient has conflicting node labels between {:?} and {:?}", + a, b + ) + }); + + for (i, src_label) in self.source.nodes.iter().enumerate() { + let tgt_node = self.w.table[i]; + let class = q_tgt.table[tgt_node]; + if *src_label != target_labels[class] { + panic!( + "node label mismatch: source node {:?} does not match target class {}", + NodeId(i), + class + ); + } + } + + let mut class_image: Vec> = vec![None; q_src.target()]; + for i in 0..self.source.nodes.len() { + let class = q_src.table[i]; + let image_class = q_tgt.table[self.w.table[i]]; + match class_image[class] { + None => class_image[class] = Some((image_class, NodeId(i))), + Some((existing_class, existing_id)) => { + if existing_class != image_class { + panic!( + "node map not constant on source quotient: {:?}->{}, {:?}->{}", + NodeId(i), + image_class, + existing_id, + existing_class + ); + } + } + } + } + + for (i, src_label) in self.source.edges.iter().enumerate() { + let tgt_edge = self.x.table[i]; + if *src_label != self.target.edges[tgt_edge] { + panic!( + "edge label mismatch: source edge {:?} does not match target edge {:?}", + EdgeId(i), + EdgeId(tgt_edge) + ); + } + + let src_edge = &self.source.adjacency[i]; + let tgt_edge_adj = &self.target.adjacency[tgt_edge]; + + if src_edge.sources.len() != tgt_edge_adj.sources.len() { + panic!( + "source arity mismatch: source edge {:?} vs target edge {:?}", + EdgeId(i), + EdgeId(tgt_edge) + ); + } + if src_edge.targets.len() != tgt_edge_adj.targets.len() { + panic!( + "target arity mismatch: source edge {:?} vs target edge {:?}", + EdgeId(i), + EdgeId(tgt_edge) + ); + } + + let src_sources_classes: Vec = src_edge + .sources + .iter() + .map(|n| q_tgt.table[self.w.table[n.0]]) + .collect(); + let tgt_sources_classes: Vec = tgt_edge_adj + .sources + .iter() + .map(|n| q_tgt.table[n.0]) + .collect(); + if src_sources_classes != tgt_sources_classes { + panic!( + "source incidence mismatch: source edge {:?} vs target edge {:?}", + EdgeId(i), + EdgeId(tgt_edge) + ); + } + + let src_targets_classes: Vec = src_edge + .targets + .iter() + .map(|n| q_tgt.table[self.w.table[n.0]]) + .collect(); + let tgt_targets_classes: Vec = tgt_edge_adj + .targets + .iter() + .map(|n| q_tgt.table[n.0]) + .collect(); + if src_targets_classes != tgt_targets_classes { + panic!( + "target incidence mismatch: source edge {:?} vs target edge 
{:?}", + EdgeId(i), + EdgeId(tgt_edge) + ); + } + } + + let mut source_h = self.source.clone(); + source_h.quotient(); + let mut target_h = self.target.clone(); + target_h.quotient(); + + let source_strict = make_hypergraph(&source_h) + .validate() + .unwrap_or_else(|err| panic!("invalid source hypergraph after quotient: {:?}", err)); + let target_strict = make_hypergraph(&target_h) + .validate() + .unwrap_or_else(|err| panic!("invalid target hypergraph after quotient: {:?}", err)); + + let w_composed = (&self.w >> &q_tgt).expect("node map already checked"); + let w_table = + coequalizer_universal(&q_src, &w_composed.table).expect("compatibility checked"); + let w_strict = FiniteFunction::new(w_table, q_tgt.target()).expect("by construction"); + + crate::strict::hypergraph::HypergraphArrow::new( + source_strict, + target_strict, + w_strict, + self.x, + ) + .unwrap_or_else(|err| panic!("strict morphism naturality failed: {:?}", err)) + } +} + impl Hypergraph { /// The empty Hypergraph with no nodes or edges. pub fn empty() -> Self { diff --git a/src/strict/hypergraph/mod.rs b/src/strict/hypergraph/mod.rs index 836341d..3c174ca 100644 --- a/src/strict/hypergraph/mod.rs +++ b/src/strict/hypergraph/mod.rs @@ -3,4 +3,5 @@ pub mod arrow; mod object; +pub use arrow::*; pub use object::*; diff --git a/tests/lax/hypergraph.rs b/tests/lax/hypergraph.rs index b3a5cba..68ada5f 100644 --- a/tests/lax/hypergraph.rs +++ b/tests/lax/hypergraph.rs @@ -1,4 +1,6 @@ -use open_hypergraphs::lax::{Hyperedge, Hypergraph, NodeId}; +use open_hypergraphs::array::vec::{VecArray, VecKind}; +use open_hypergraphs::finite_function::FiniteFunction; +use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, NodeId}; #[test] fn test_delete_nodes_remap_and_quotient() { @@ -82,3 +84,58 @@ fn test_delete_nodes_ignores_out_of_range() { assert_eq!(h.quotient.0, vec![NodeId(0)]); assert_eq!(h.quotient.1, vec![NodeId(1)]); } + +#[test] +fn test_lax_hypergraph_arrow_to_strict_identity() { + let mut source = Hypergraph::empty(); + source.nodes = vec![1, 2]; + source.edges = vec![10]; + source.adjacency = vec![Hyperedge { + sources: vec![NodeId(0)], + targets: vec![NodeId(1)], + }]; + + let mut target = Hypergraph::empty(); + target.nodes = vec![1, 2]; + target.edges = vec![10]; + target.adjacency = vec![Hyperedge { + sources: vec![NodeId(0)], + targets: vec![NodeId(1)], + }]; + + let w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); + let x = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); + + let arrow = LaxHypergraphArrow::new(source, target, w, x).to_strict(); + + assert_eq!(arrow.w.table, VecArray(vec![0, 1])); + assert_eq!(arrow.w.target(), 2); + assert_eq!(arrow.x.table, VecArray(vec![0])); + assert_eq!(arrow.x.target(), 1); +} + +#[test] +#[should_panic(expected = "node map not constant on source quotient")] +fn test_lax_hypergraph_arrow_to_strict_panics_on_nonconstant_w() { + let mut source = Hypergraph::empty(); + source.nodes = vec![1, 1]; + source.edges = vec![10]; + source.adjacency = vec![Hyperedge { + sources: vec![NodeId(0)], + targets: vec![NodeId(1)], + }]; + source.quotient = (vec![NodeId(0)], vec![NodeId(1)]); + + let mut target = Hypergraph::empty(); + target.nodes = vec![1, 1]; + target.edges = vec![10]; + target.adjacency = vec![Hyperedge { + sources: vec![NodeId(0)], + targets: vec![NodeId(1)], + }]; + + let w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); + let x = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); + + let _ = LaxHypergraphArrow::new(source, 
target, w, x).to_strict(); +} From f339159f559325fb47479854b8229bbacec04c34 Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 07:10:55 +0100 Subject: [PATCH 02/41] implement Rule and Match --- src/lax/mod.rs | 2 + src/lax/rewrite.rs | 178 ++++++++++++++++++++++++++++++++++++++++ tests/lax/hypergraph.rs | 2 +- tests/lax/mod.rs | 1 + tests/lax/rewrite.rs | 134 ++++++++++++++++++++++++++++++ 5 files changed, 316 insertions(+), 1 deletion(-) create mode 100644 src/lax/rewrite.rs create mode 100644 tests/lax/rewrite.rs diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 4cf746c..646d9f7 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -76,10 +76,12 @@ pub mod functor; pub mod hypergraph; pub mod mut_category; pub mod open_hypergraph; +pub mod rewrite; pub use crate::category::*; pub use hypergraph::*; pub use open_hypergraph::*; +pub use rewrite::*; pub mod optic; pub mod var; diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs new file mode 100644 index 0000000..8224d85 --- /dev/null +++ b/src/lax/rewrite.rs @@ -0,0 +1,178 @@ +use crate::lax::{Arrow, Hypergraph, LaxHypergraphArrow}; + +/// A rewrite rule as a span `L <- K -> R` of lax morphisms. +pub struct Rule { + pub lhs: Hypergraph, + pub interface: Hypergraph, + pub rhs: Hypergraph, + pub left: LaxHypergraphArrow, + pub right: LaxHypergraphArrow, +} + +impl Rule { + /// Construct a rule and validate its structural properties. + pub fn new( + lhs: Hypergraph, + interface: Hypergraph, + rhs: Hypergraph, + left: LaxHypergraphArrow, + right: LaxHypergraphArrow, + ) -> Self { + Rule { + lhs, + interface, + rhs, + left, + right, + } + .validate() + } + + /// Validate the span structure `L <- K -> R`. + pub fn validate(self) -> Self { + if self.left.source != self.interface { + panic!("rule left morphism source does not match interface"); + } + if self.left.target != self.lhs { + panic!("rule left morphism target does not match lhs"); + } + if self.right.source != self.interface { + panic!("rule right morphism source does not match interface"); + } + if self.right.target != self.rhs { + panic!("rule right morphism target does not match rhs"); + } + + self + } +} + +/// A rule-specific match `L -> G`, carrying the rule and the morphism. +pub struct Match { + pub rule: Rule, + pub morphism: LaxHypergraphArrow, +} + +impl Match { + /// Construct a match without validation. + pub fn new(rule: Rule, morphism: LaxHypergraphArrow) -> Self { + Match { rule, morphism }.validate() + } + + /// Validate a match against identification and dangling conditions. 
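+ ///
+ /// Identification: no two distinct nodes of `L` outside the image of the rule's
+ /// left leg `l(K)` may be identified by the match in `G`.
+ /// Dangling: no node of `L` outside `l(K)` may be sent to a node of `G` that is
+ /// incident to a hyperedge lying outside the image of the match.
+ ///
+ /// Panics with a descriptive message if a size check on the underlying maps or
+ /// either condition fails.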
+ pub fn validate(self) -> Self { + let l = &self.rule.left; + let m = &self.morphism; + let g = &m.target; + + if l.w.target() != self.rule.lhs.nodes.len() { + panic!( + "rule left map target size mismatch: got {}, expected {}", + l.w.target(), + self.rule.lhs.nodes.len() + ); + } + if m.w.source() != self.rule.lhs.nodes.len() { + panic!( + "match map source size mismatch: got {}, expected {}", + m.w.source(), + self.rule.lhs.nodes.len() + ); + } + if m.w.target() != g.nodes.len() { + panic!( + "match map target size mismatch: got {}, expected {}", + m.w.target(), + g.nodes.len() + ); + } + if m.x.target() != g.edges.len() { + panic!( + "match edge map target size mismatch: got {}, expected {}", + m.x.target(), + g.edges.len() + ); + } + if !self.identification_condition() { + panic!("match violates the identification condition"); + } + if !self.dangling_condition() { + panic!("match violates the dangling condition"); + } + self + } + + fn identification_condition(&self) -> bool { + // Identification fails if x,y ∈ L \ l(K) and m(x) = m(y). + let l = &self.rule.left; + let m = &self.morphism; + + let mut in_image = vec![false; self.rule.lhs.nodes.len()]; + for i in 0..l.w.source() { + let idx = l.w.table[i]; + in_image[idx] = true; + } + + let mut seen = vec![None; m.w.target()]; + for i in 0..self.rule.lhs.nodes.len() { + if in_image[i] { + continue; + } + let img = m.w.table[i]; + if let Some(existing) = seen[img] { + if existing != i { + return false; + } + } else { + seen[img] = Some(i); + } + } + + true + } + + fn dangling_condition(&self) -> bool { + // Dangling fails if a vertex in L \ l(K) is incident to any hyperedge in G + // outside the image of m(L). + let l = &self.rule.left; + let m = &self.morphism; + let g = &m.target; + + let mut in_l_image = vec![false; self.rule.lhs.nodes.len()]; + for i in 0..l.w.source() { + let idx = l.w.table[i]; + in_l_image[idx] = true; + } + + let mut forbidden_nodes = vec![false; g.nodes.len()]; + for i in 0..self.rule.lhs.nodes.len() { + if in_l_image[i] { + continue; + } + let img = m.w.table[i]; + forbidden_nodes[img] = true; + } + + let mut edge_in_image = vec![false; g.edges.len()]; + for i in 0..m.x.source() { + let idx = m.x.table[i]; + edge_in_image[idx] = true; + } + + for (edge_id, edge) in g.adjacency.iter().enumerate() { + if edge_in_image[edge_id] { + continue; + } + let touches_forbidden = edge + .sources + .iter() + .chain(edge.targets.iter()) + .any(|n| forbidden_nodes[n.0]); + if touches_forbidden { + return false; + } + } + + true + } +} diff --git a/tests/lax/hypergraph.rs b/tests/lax/hypergraph.rs index 68ada5f..242e73a 100644 --- a/tests/lax/hypergraph.rs +++ b/tests/lax/hypergraph.rs @@ -1,6 +1,6 @@ use open_hypergraphs::array::vec::{VecArray, VecKind}; use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, NodeId}; +use open_hypergraphs::lax::{Arrow, Hyperedge, Hypergraph, LaxHypergraphArrow, NodeId}; #[test] fn test_delete_nodes_remap_and_quotient() { diff --git a/tests/lax/mod.rs b/tests/lax/mod.rs index a3a11c8..4f3250b 100644 --- a/tests/lax/mod.rs +++ b/tests/lax/mod.rs @@ -1,2 +1,3 @@ pub mod eval; pub mod hypergraph; +pub mod rewrite; diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs new file mode 100644 index 0000000..d60b556 --- /dev/null +++ b/tests/lax/rewrite.rs @@ -0,0 +1,134 @@ +use open_hypergraphs::array::vec::{VecArray, VecKind}; +use open_hypergraphs::finite_function::FiniteFunction; +use open_hypergraphs::lax::{Hyperedge, 
Hypergraph, LaxHypergraphArrow, Match, NodeId, Rule}; + +fn empty_edges() -> FiniteFunction { + FiniteFunction::::new(VecArray(vec![]), 0).unwrap() +} + +#[test] +fn test_rule_new_ok() { + let mut k: Hypergraph = Hypergraph::empty(); + k.nodes = vec![1]; + + let mut l: Hypergraph = Hypergraph::empty(); + l.nodes = vec![1, 2]; + + let mut r: Hypergraph = Hypergraph::empty(); + r.nodes = vec![1, 2]; + + let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let _rule = Rule::new(l, k, r, left, right); +} + +#[test] +#[should_panic(expected = "rule left morphism target does not match lhs")] +fn test_rule_new_panics_on_mismatch() { + let mut k: Hypergraph = Hypergraph::empty(); + k.nodes = vec![1]; + + let mut l: Hypergraph = Hypergraph::empty(); + l.nodes = vec![1, 2]; + + let mut r: Hypergraph = Hypergraph::empty(); + r.nodes = vec![1, 2]; + + let mut bad_lhs: Hypergraph = Hypergraph::empty(); + bad_lhs.nodes = vec![9]; + + let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + + let left = LaxHypergraphArrow::new(k.clone(), l, left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let _rule = Rule::new(bad_lhs, k, r, left, right); +} + +#[test] +fn test_match_validate_ok() { + let mut k: Hypergraph = Hypergraph::empty(); + k.nodes = vec![1]; + + let mut l: Hypergraph = Hypergraph::empty(); + l.nodes = vec![1, 2]; + + let mut r: Hypergraph = Hypergraph::empty(); + r.nodes = vec![1, 2]; + + let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let rule = Rule::new(l.clone(), k, r.clone(), left, right); + + let m_w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); + let morphism = LaxHypergraphArrow::new(l, r.clone(), m_w, empty_edges()); + + let _match = Match::new(rule, morphism).validate(); +} + +#[test] +#[should_panic(expected = "match violates the identification condition")] +fn test_match_validate_identification_fails() { + let mut k: Hypergraph = Hypergraph::empty(); + k.nodes = vec![1]; + + let mut l: Hypergraph = Hypergraph::empty(); + l.nodes = vec![1, 2, 3]; + + let mut r: Hypergraph = Hypergraph::empty(); + r.nodes = vec![1, 2, 3]; + + let left_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let rule = Rule::new(l.clone(), k, r.clone(), left, right); + + let m_w = FiniteFunction::::new(VecArray(vec![0, 0, 0]), 3).unwrap(); + let morphism = LaxHypergraphArrow::new(l, r, m_w, empty_edges()); + + let _match = Match::new(rule, morphism).validate(); +} + +#[test] +#[should_panic(expected = "match violates the dangling condition")] +fn test_match_validate_dangling_fails() { + let mut k: Hypergraph = Hypergraph::empty(); + k.nodes = vec![1]; + + let mut l: Hypergraph = Hypergraph::empty(); + 
l.nodes = vec![1, 2]; + + let mut r: Hypergraph = Hypergraph::empty(); + r.nodes = vec![1, 2]; + + let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let rule = Rule::new(l.clone(), k, r.clone(), left, right); + + let mut g: Hypergraph = Hypergraph::empty(); + g.nodes = vec![1, 2]; + g.edges = vec![10]; + g.adjacency = vec![Hyperedge { + sources: vec![NodeId(1)], + targets: vec![], + }]; + + let m_w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); + let m_x = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); + let morphism = LaxHypergraphArrow::new(l, g, m_w, m_x); + + let _match = Match::new(rule, morphism).validate(); +} From 182afc9f8f49ac4b824d4c64a82db8cd50da2fec Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 07:30:57 +0100 Subject: [PATCH 03/41] more tests --- tests/lax/rewrite.rs | 119 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 97 insertions(+), 22 deletions(-) diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index d60b556..c7c1e97 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -1,6 +1,6 @@ use open_hypergraphs::array::vec::{VecArray, VecKind}; use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, Match, NodeId, Rule}; +use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, Match, Rule}; fn empty_edges() -> FiniteFunction { FiniteFunction::::new(VecArray(vec![]), 0).unwrap() @@ -9,13 +9,15 @@ fn empty_edges() -> FiniteFunction { #[test] fn test_rule_new_ok() { let mut k: Hypergraph = Hypergraph::empty(); - k.nodes = vec![1]; + k.new_node(1); let mut l: Hypergraph = Hypergraph::empty(); - l.nodes = vec![1, 2]; + l.new_node(1); + l.new_node(2); let mut r: Hypergraph = Hypergraph::empty(); - r.nodes = vec![1, 2]; + r.new_node(1); + r.new_node(2); let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); @@ -30,16 +32,18 @@ fn test_rule_new_ok() { #[should_panic(expected = "rule left morphism target does not match lhs")] fn test_rule_new_panics_on_mismatch() { let mut k: Hypergraph = Hypergraph::empty(); - k.nodes = vec![1]; + k.new_node(1); let mut l: Hypergraph = Hypergraph::empty(); - l.nodes = vec![1, 2]; + l.new_node(1); + l.new_node(2); let mut r: Hypergraph = Hypergraph::empty(); - r.nodes = vec![1, 2]; + r.new_node(1); + r.new_node(2); let mut bad_lhs: Hypergraph = Hypergraph::empty(); - bad_lhs.nodes = vec![9]; + bad_lhs.new_node(9); let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); @@ -53,13 +57,15 @@ fn test_rule_new_panics_on_mismatch() { #[test] fn test_match_validate_ok() { let mut k: Hypergraph = Hypergraph::empty(); - k.nodes = vec![1]; + k.new_node(1); let mut l: Hypergraph = Hypergraph::empty(); - l.nodes = vec![1, 2]; + l.new_node(1); + l.new_node(2); let mut r: Hypergraph = Hypergraph::empty(); - r.nodes = vec![1, 2]; + r.new_node(1); + r.new_node(2); let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); @@ -78,13 +84,17 @@ fn test_match_validate_ok() { #[should_panic(expected = "match violates the identification 
condition")] fn test_match_validate_identification_fails() { let mut k: Hypergraph = Hypergraph::empty(); - k.nodes = vec![1]; + k.new_node(1); let mut l: Hypergraph = Hypergraph::empty(); - l.nodes = vec![1, 2, 3]; + l.new_node(1); + l.new_node(2); + l.new_node(3); let mut r: Hypergraph = Hypergraph::empty(); - r.nodes = vec![1, 2, 3]; + r.new_node(1); + r.new_node(2); + r.new_node(3); let left_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); let right_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); @@ -103,13 +113,15 @@ fn test_match_validate_identification_fails() { #[should_panic(expected = "match violates the dangling condition")] fn test_match_validate_dangling_fails() { let mut k: Hypergraph = Hypergraph::empty(); - k.nodes = vec![1]; + k.new_node(1); let mut l: Hypergraph = Hypergraph::empty(); - l.nodes = vec![1, 2]; + l.new_node(1); + l.new_node(2); let mut r: Hypergraph = Hypergraph::empty(); - r.nodes = vec![1, 2]; + r.new_node(1); + r.new_node(2); let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); @@ -119,12 +131,12 @@ fn test_match_validate_dangling_fails() { let rule = Rule::new(l.clone(), k, r.clone(), left, right); let mut g: Hypergraph = Hypergraph::empty(); - g.nodes = vec![1, 2]; - g.edges = vec![10]; - g.adjacency = vec![Hyperedge { - sources: vec![NodeId(1)], + g.new_node(1); + let g1 = g.new_node(2); + g.new_edge(10, Hyperedge { + sources: vec![g1], targets: vec![], - }]; + }); let m_w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); let m_x = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); @@ -132,3 +144,66 @@ fn test_match_validate_dangling_fails() { let _match = Match::new(rule, morphism).validate(); } + +#[test] +#[should_panic(expected = "match violates the identification condition")] +fn test_match_validate_identification_fails_simple() { + // K = ∅, L = {a, b}. G has one node w and m(a) = m(b) = w. + let k: Hypergraph = Hypergraph::empty(); + + let mut l: Hypergraph = Hypergraph::empty(); + l.new_node(1); + l.new_node(2); + + let mut r: Hypergraph = Hypergraph::empty(); + r.new_node(1); + r.new_node(2); + + let left_w = FiniteFunction::::new(VecArray(vec![]), 2).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![]), 2).unwrap(); + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let rule = Rule::new(l.clone(), k, r, left, right); + + let mut g: Hypergraph = Hypergraph::empty(); + g.new_node(1); + + let m_w = FiniteFunction::::new(VecArray(vec![0, 0]), 1).unwrap(); + let morphism = LaxHypergraphArrow::new(l, g, m_w, empty_edges()); + + let _match = Match::new(rule, morphism).validate(); +} + +#[test] +#[should_panic(expected = "match violates the dangling condition")] +fn test_match_validate_dangling_fails_simple() { + // K = ∅, L = {u}. G = {v} with a loop edge e: v -> v, and m(u) = v. 
+ let k: Hypergraph = Hypergraph::empty(); + + let mut l: Hypergraph = Hypergraph::empty(); + l.new_node(1); + + let mut r: Hypergraph = Hypergraph::empty(); + r.new_node(1); + + let left_w = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); + let right_w = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); + let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); + let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); + + let rule = Rule::new(l.clone(), k, r, left, right); + + let mut g: Hypergraph = Hypergraph::empty(); + let v = g.new_node(1); + g.new_edge(10, Hyperedge { + sources: vec![v], + targets: vec![v], + }); + + let m_w = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); + let m_x = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); + let morphism = LaxHypergraphArrow::new(l, g, m_w, m_x); + + let _match = Match::new(rule, morphism).validate(); +} From fa66c24e2d34b39c8080d08cf423c48a0ac10759 Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 11:30:22 +0100 Subject: [PATCH 04/41] refactor: avoid cloning hypergraphs --- src/lax/hypergraph.rs | 98 ++++++++++++++++ src/lax/rewrite.rs | 255 +++++++++++++++++------------------------- tests/lax/rewrite.rs | 228 +++++++++---------------------------- 3 files changed, 253 insertions(+), 328 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 3e2c2e0..58ee778 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -19,6 +19,104 @@ pub struct Hyperedge { pub targets: Vec, } +/// A map on nodes and edges without structure preservation guarantees. +#[derive(Clone)] +pub struct NodeEdgeMap { + pub nodes: FiniteFunction, + pub edges: FiniteFunction, +} + +#[derive(Clone)] +pub struct LaxSpan { + pub apex: Hypergraph, + pub left: Hypergraph, + pub right: Hypergraph, + pub left_map: NodeEdgeMap, + pub right_map: NodeEdgeMap, +} + +impl LaxSpan { + /// Construct a lax span and validate its structural properties. + pub fn new( + apex: Hypergraph, + left: Hypergraph, + right: Hypergraph, + left_map: NodeEdgeMap, + right_map: NodeEdgeMap, + ) -> Self { + LaxSpan { + apex, + left, + right, + left_map, + right_map, + } + .validate() + } + + /// Validate that maps are compatible with their hypergraphs. 
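+ ///
+ /// Only sizes are checked: each component of `left_map` and `right_map` must
+ /// have the apex as its source and the corresponding leg as its target, and any
+ /// mismatch panics with a descriptive message. Label and incidence preservation
+ /// are not verified here.
+ ///
+ /// A minimal sketch with an empty apex (the labels are assumptions, in the
+ /// spirit of the `span_with_empty_apex` helper in `tests/lax/rewrite.rs`):
+ ///
+ /// ```ignore
+ /// let empty = |n: usize| FiniteFunction::<VecKind>::new(VecArray(vec![]), n).unwrap();
+ /// let mut left = Hypergraph::<usize, usize>::empty();
+ /// left.new_node(1);
+ /// let right = Hypergraph::<usize, usize>::empty();
+ /// let left_map = NodeEdgeMap { nodes: empty(left.nodes.len()), edges: empty(left.edges.len()) };
+ /// let right_map = NodeEdgeMap { nodes: empty(right.nodes.len()), edges: empty(right.edges.len()) };
+ /// let span = LaxSpan::new(Hypergraph::empty(), left, right, left_map, right_map);
+ /// ```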
+ pub fn validate(self) -> Self { + if self.left_map.nodes.source() != self.apex.nodes.len() { + panic!( + "left map node source size mismatch: got {}, expected {}", + self.left_map.nodes.source(), + self.apex.nodes.len() + ); + } + if self.left_map.nodes.target() != self.left.nodes.len() { + panic!( + "left map node target size mismatch: got {}, expected {}", + self.left_map.nodes.target(), + self.left.nodes.len() + ); + } + if self.left_map.edges.source() != self.apex.edges.len() { + panic!( + "left map edge source size mismatch: got {}, expected {}", + self.left_map.edges.source(), + self.apex.edges.len() + ); + } + if self.left_map.edges.target() != self.left.edges.len() { + panic!( + "left map edge target size mismatch: got {}, expected {}", + self.left_map.edges.target(), + self.left.edges.len() + ); + } + if self.right_map.nodes.source() != self.apex.nodes.len() { + panic!( + "right map node source size mismatch: got {}, expected {}", + self.right_map.nodes.source(), + self.apex.nodes.len() + ); + } + if self.right_map.nodes.target() != self.right.nodes.len() { + panic!( + "right map node target size mismatch: got {}, expected {}", + self.right_map.nodes.target(), + self.right.nodes.len() + ); + } + if self.right_map.edges.source() != self.apex.edges.len() { + panic!( + "right map edge source size mismatch: got {}, expected {}", + self.right_map.edges.source(), + self.apex.edges.len() + ); + } + if self.right_map.edges.target() != self.right.edges.len() { + panic!( + "right map edge target size mismatch: got {}, expected {}", + self.right_map.edges.target(), + self.right.edges.len() + ); + } + + self + } +} + /// Create a [`Hyperedge`] from a tuple of `(sources, targets)`. /// /// This allows convenient creation of hyperedges from various collection types: diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 8224d85..05e128d 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,178 +1,123 @@ -use crate::lax::{Arrow, Hypergraph, LaxHypergraphArrow}; +use crate::lax::{Arrow, Hypergraph, LaxSpan, NodeEdgeMap}; + +/// Rewrite a lax hypergraph using a rule span and candidate map. +pub fn rewrite( + g: &Hypergraph, + rule: &LaxSpan, + candidate: &NodeEdgeMap, +) -> Option> { + let rule = rule.clone().validate(); + + validate_candidate_map(&rule.left, g, candidate); + if !identification_condition(&rule, candidate) { + return None; + } + if !dangling_condition(&rule, candidate, g) { + return None; + } -/// A rewrite rule as a span `L <- K -> R` of lax morphisms. -pub struct Rule { - pub lhs: Hypergraph, - pub interface: Hypergraph, - pub rhs: Hypergraph, - pub left: LaxHypergraphArrow, - pub right: LaxHypergraphArrow, + Some(g.clone()) } -impl Rule { - /// Construct a rule and validate its structural properties. - pub fn new( - lhs: Hypergraph, - interface: Hypergraph, - rhs: Hypergraph, - left: LaxHypergraphArrow, - right: LaxHypergraphArrow, - ) -> Self { - Rule { - lhs, - interface, - rhs, - left, - right, - } - .validate() +fn validate_candidate_map( + left: &Hypergraph, + g: &Hypergraph, + candidate: &NodeEdgeMap, +) { + if candidate.nodes.source() != left.nodes.len() { + panic!( + "candidate map node source size mismatch: got {}, expected {}", + candidate.nodes.source(), + left.nodes.len() + ); } - - /// Validate the span structure `L <- K -> R`. 
- pub fn validate(self) -> Self { - if self.left.source != self.interface { - panic!("rule left morphism source does not match interface"); - } - if self.left.target != self.lhs { - panic!("rule left morphism target does not match lhs"); - } - if self.right.source != self.interface { - panic!("rule right morphism source does not match interface"); - } - if self.right.target != self.rhs { - panic!("rule right morphism target does not match rhs"); - } - - self + if candidate.nodes.target() != g.nodes.len() { + panic!( + "candidate map node target size mismatch: got {}, expected {}", + candidate.nodes.target(), + g.nodes.len() + ); + } + if candidate.edges.source() != left.edges.len() { + panic!( + "candidate map edge source size mismatch: got {}, expected {}", + candidate.edges.source(), + left.edges.len() + ); + } + if candidate.edges.target() != g.edges.len() { + panic!( + "candidate map edge target size mismatch: got {}, expected {}", + candidate.edges.target(), + g.edges.len() + ); } } -/// A rule-specific match `L -> G`, carrying the rule and the morphism. -pub struct Match { - pub rule: Rule, - pub morphism: LaxHypergraphArrow, -} - -impl Match { - /// Construct a match without validation. - pub fn new(rule: Rule, morphism: LaxHypergraphArrow) -> Self { - Match { rule, morphism }.validate() +fn identification_condition(rule: &LaxSpan, candidate: &NodeEdgeMap) -> bool { + let mut in_image = vec![false; rule.left.nodes.len()]; + for i in 0..rule.left_map.nodes.source() { + let idx = rule.left_map.nodes.table[i]; + in_image[idx] = true; } - /// Validate a match against identification and dangling conditions. - pub fn validate(self) -> Self { - let l = &self.rule.left; - let m = &self.morphism; - let g = &m.target; - - if l.w.target() != self.rule.lhs.nodes.len() { - panic!( - "rule left map target size mismatch: got {}, expected {}", - l.w.target(), - self.rule.lhs.nodes.len() - ); - } - if m.w.source() != self.rule.lhs.nodes.len() { - panic!( - "match map source size mismatch: got {}, expected {}", - m.w.source(), - self.rule.lhs.nodes.len() - ); + let mut seen = vec![None; candidate.nodes.target()]; + for i in 0..rule.left.nodes.len() { + if in_image[i] { + continue; } - if m.w.target() != g.nodes.len() { - panic!( - "match map target size mismatch: got {}, expected {}", - m.w.target(), - g.nodes.len() - ); - } - if m.x.target() != g.edges.len() { - panic!( - "match edge map target size mismatch: got {}, expected {}", - m.x.target(), - g.edges.len() - ); - } - if !self.identification_condition() { - panic!("match violates the identification condition"); - } - if !self.dangling_condition() { - panic!("match violates the dangling condition"); + let img = candidate.nodes.table[i]; + if let Some(existing) = seen[img] { + if existing != i { + return false; + } + } else { + seen[img] = Some(i); } - self } - fn identification_condition(&self) -> bool { - // Identification fails if x,y ∈ L \ l(K) and m(x) = m(y). 
- let l = &self.rule.left; - let m = &self.morphism; - - let mut in_image = vec![false; self.rule.lhs.nodes.len()]; - for i in 0..l.w.source() { - let idx = l.w.table[i]; - in_image[idx] = true; - } - - let mut seen = vec![None; m.w.target()]; - for i in 0..self.rule.lhs.nodes.len() { - if in_image[i] { - continue; - } - let img = m.w.table[i]; - if let Some(existing) = seen[img] { - if existing != i { - return false; - } - } else { - seen[img] = Some(i); - } - } + true +} - true +fn dangling_condition( + rule: &LaxSpan, + candidate: &NodeEdgeMap, + g: &Hypergraph, +) -> bool { + let mut in_l_image = vec![false; rule.left.nodes.len()]; + for i in 0..rule.left_map.nodes.source() { + let idx = rule.left_map.nodes.table[i]; + in_l_image[idx] = true; } - fn dangling_condition(&self) -> bool { - // Dangling fails if a vertex in L \ l(K) is incident to any hyperedge in G - // outside the image of m(L). - let l = &self.rule.left; - let m = &self.morphism; - let g = &m.target; - - let mut in_l_image = vec![false; self.rule.lhs.nodes.len()]; - for i in 0..l.w.source() { - let idx = l.w.table[i]; - in_l_image[idx] = true; + let mut forbidden_nodes = vec![false; g.nodes.len()]; + for i in 0..rule.left.nodes.len() { + if in_l_image[i] { + continue; } + let img = candidate.nodes.table[i]; + forbidden_nodes[img] = true; + } - let mut forbidden_nodes = vec![false; g.nodes.len()]; - for i in 0..self.rule.lhs.nodes.len() { - if in_l_image[i] { - continue; - } - let img = m.w.table[i]; - forbidden_nodes[img] = true; - } + let mut edge_in_image = vec![false; g.edges.len()]; + for i in 0..candidate.edges.source() { + let idx = candidate.edges.table[i]; + edge_in_image[idx] = true; + } - let mut edge_in_image = vec![false; g.edges.len()]; - for i in 0..m.x.source() { - let idx = m.x.table[i]; - edge_in_image[idx] = true; + for (edge_id, edge) in g.adjacency.iter().enumerate() { + if edge_in_image[edge_id] { + continue; } - - for (edge_id, edge) in g.adjacency.iter().enumerate() { - if edge_in_image[edge_id] { - continue; - } - let touches_forbidden = edge - .sources - .iter() - .chain(edge.targets.iter()) - .any(|n| forbidden_nodes[n.0]); - if touches_forbidden { - return false; - } + let touches_forbidden = edge + .sources + .iter() + .chain(edge.targets.iter()) + .any(|n| forbidden_nodes[n.0]); + if touches_forbidden { + return false; } - - true } + + true } diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index c7c1e97..b537881 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -1,209 +1,91 @@ use open_hypergraphs::array::vec::{VecArray, VecKind}; use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{Hyperedge, Hypergraph, LaxHypergraphArrow, Match, Rule}; +use open_hypergraphs::lax::{rewrite, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap}; -fn empty_edges() -> FiniteFunction { - FiniteFunction::::new(VecArray(vec![]), 0).unwrap() +fn empty_map(target: usize) -> FiniteFunction { + FiniteFunction::::new(VecArray(vec![]), target).unwrap() } -#[test] -fn test_rule_new_ok() { - let mut k: Hypergraph = Hypergraph::empty(); - k.new_node(1); - - let mut l: Hypergraph = Hypergraph::empty(); - l.new_node(1); - l.new_node(2); - - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - - let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = 
LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let _rule = Rule::new(l, k, r, left, right); -} - -#[test] -#[should_panic(expected = "rule left morphism target does not match lhs")] -fn test_rule_new_panics_on_mismatch() { - let mut k: Hypergraph = Hypergraph::empty(); - k.new_node(1); - - let mut l: Hypergraph = Hypergraph::empty(); - l.new_node(1); - l.new_node(2); - - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - - let mut bad_lhs: Hypergraph = Hypergraph::empty(); - bad_lhs.new_node(9); - - let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - - let left = LaxHypergraphArrow::new(k.clone(), l, left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let _rule = Rule::new(bad_lhs, k, r, left, right); -} - -#[test] -fn test_match_validate_ok() { - let mut k: Hypergraph = Hypergraph::empty(); - k.new_node(1); - - let mut l: Hypergraph = Hypergraph::empty(); - l.new_node(1); - l.new_node(2); - - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - - let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let rule = Rule::new(l.clone(), k, r.clone(), left, right); - - let m_w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); - let morphism = LaxHypergraphArrow::new(l, r.clone(), m_w, empty_edges()); - - let _match = Match::new(rule, morphism).validate(); +fn span_with_empty_apex( + left: Hypergraph, + right: Hypergraph, +) -> LaxSpan { + let apex: Hypergraph = Hypergraph::empty(); + let left_map = NodeEdgeMap { + nodes: empty_map(left.nodes.len()), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: empty_map(right.nodes.len()), + edges: empty_map(right.edges.len()), + }; + LaxSpan::new(apex, left, right, left_map, right_map) } #[test] -#[should_panic(expected = "match violates the identification condition")] -fn test_match_validate_identification_fails() { - let mut k: Hypergraph = Hypergraph::empty(); - k.new_node(1); - - let mut l: Hypergraph = Hypergraph::empty(); - l.new_node(1); - l.new_node(2); - l.new_node(3); - - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - r.new_node(3); - - let left_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![0]), 3).unwrap(); - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let rule = Rule::new(l.clone(), k, r.clone(), left, right); - - let m_w = FiniteFunction::::new(VecArray(vec![0, 0, 0]), 3).unwrap(); - let morphism = LaxHypergraphArrow::new(l, r, m_w, empty_edges()); - - let _match = Match::new(rule, morphism).validate(); -} - -#[test] -#[should_panic(expected = "match violates the dangling condition")] -fn test_match_validate_dangling_fails() { - let mut k: Hypergraph = Hypergraph::empty(); - k.new_node(1); - +fn test_rewrite_identification_fails() { + // K = ∅, L = {a, b}. G has one node w and m(a) = m(b) = w. 
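// Both a and b lie outside l(K) (K is empty) yet map to the same node w, so the
// identification condition fails and `rewrite` returns `None`.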
let mut l: Hypergraph = Hypergraph::empty(); l.new_node(1); l.new_node(2); - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - - let left_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![0]), 2).unwrap(); - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let rule = Rule::new(l.clone(), k, r.clone(), left, right); + let r: Hypergraph = Hypergraph::empty(); + let rule = span_with_empty_apex(l.clone(), r); let mut g: Hypergraph = Hypergraph::empty(); g.new_node(1); - let g1 = g.new_node(2); - g.new_edge(10, Hyperedge { - sources: vec![g1], - targets: vec![], - }); - let m_w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); - let m_x = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); - let morphism = LaxHypergraphArrow::new(l, g, m_w, m_x); + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; - let _match = Match::new(rule, morphism).validate(); + assert!(rewrite(&g, &rule, &candidate).is_none()); } #[test] -#[should_panic(expected = "match violates the identification condition")] -fn test_match_validate_identification_fails_simple() { - // K = ∅, L = {a, b}. G has one node w and m(a) = m(b) = w. - let k: Hypergraph = Hypergraph::empty(); - +fn test_rewrite_dangling_fails() { + // K = ∅, L = {u}. G = {v} with a loop edge e: v -> v, and m(u) = v. let mut l: Hypergraph = Hypergraph::empty(); l.new_node(1); - l.new_node(2); - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - r.new_node(2); - - let left_w = FiniteFunction::::new(VecArray(vec![]), 2).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![]), 2).unwrap(); - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let rule = Rule::new(l.clone(), k, r, left, right); + let r: Hypergraph = Hypergraph::empty(); + let rule = span_with_empty_apex(l.clone(), r); let mut g: Hypergraph = Hypergraph::empty(); - g.new_node(1); - - let m_w = FiniteFunction::::new(VecArray(vec![0, 0]), 1).unwrap(); - let morphism = LaxHypergraphArrow::new(l, g, m_w, empty_edges()); - - let _match = Match::new(rule, morphism).validate(); + let v = g.new_node(1); + g.new_edge( + 10, + Hyperedge { + sources: vec![v], + targets: vec![v], + }, + ); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + assert!(rewrite(&g, &rule, &candidate).is_none()); } #[test] -#[should_panic(expected = "match violates the dangling condition")] -fn test_match_validate_dangling_fails_simple() { - // K = ∅, L = {u}. G = {v} with a loop edge e: v -> v, and m(u) = v. - let k: Hypergraph = Hypergraph::empty(); - +fn test_rewrite_gluing_ok() { + // K = ∅, L = {u}. G = {v} with no edges, and m(u) = v. 
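// The loop e is outside the image of the (empty) edge map but touches v = m(u),
// a node the rule would delete, so the dangling condition fails and `rewrite`
// returns `None`.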
let mut l: Hypergraph = Hypergraph::empty(); l.new_node(1); - let mut r: Hypergraph = Hypergraph::empty(); - r.new_node(1); - - let left_w = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); - let right_w = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); - let left = LaxHypergraphArrow::new(k.clone(), l.clone(), left_w, empty_edges()); - let right = LaxHypergraphArrow::new(k.clone(), r.clone(), right_w, empty_edges()); - - let rule = Rule::new(l.clone(), k, r, left, right); + let r: Hypergraph = Hypergraph::empty(); + let rule = span_with_empty_apex(l.clone(), r); let mut g: Hypergraph = Hypergraph::empty(); - let v = g.new_node(1); - g.new_edge(10, Hyperedge { - sources: vec![v], - targets: vec![v], - }); + g.new_node(1); - let m_w = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); - let m_x = FiniteFunction::::new(VecArray(vec![]), 1).unwrap(); - let morphism = LaxHypergraphArrow::new(l, g, m_w, m_x); + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; - let _match = Match::new(rule, morphism).validate(); + assert!(rewrite(&g, &rule, &candidate).is_some()); } From 618ee77c00eabfb505c2d4560e967fe6d1d5f724 Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 12:43:29 +0100 Subject: [PATCH 05/41] exploded context --- src/lax/rewrite.rs | 77 ++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 64 insertions(+), 13 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 05e128d..e863ff7 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,4 +1,4 @@ -use crate::lax::{Arrow, Hypergraph, LaxSpan, NodeEdgeMap}; +use crate::lax::{Arrow, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; /// Rewrite a lax hypergraph using a rule span and candidate map. 
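///
/// Returns `None` when the candidate map violates the identification or dangling
/// condition; otherwise it returns the exploded context: the nodes and edges of
/// `g` outside the image of the candidate (with matched boundary nodes re-created
/// once per incidence) together with a disjoint copy of the rule's apex.
///
/// A sketch of the simplest accepted case, mirroring `test_rewrite_gluing_ok` in
/// `tests/lax/rewrite.rs` (the `span_with_empty_apex` helper is defined there):
///
/// ```ignore
/// let mut l = Hypergraph::<usize, usize>::empty();
/// l.new_node(1);
/// let rule = span_with_empty_apex(l, Hypergraph::empty());
///
/// let mut g = Hypergraph::<usize, usize>::empty();
/// g.new_node(1);
/// let candidate = NodeEdgeMap {
///     nodes: FiniteFunction::<VecKind>::new(VecArray(vec![0]), 1).unwrap(),
///     edges: FiniteFunction::<VecKind>::new(VecArray(vec![]), 0).unwrap(),
/// };
/// assert!(rewrite(&g, &rule, &candidate).is_some());
/// ```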
pub fn rewrite( @@ -6,29 +6,80 @@ pub fn rewrite( rule: &LaxSpan, candidate: &NodeEdgeMap, ) -> Option> { - let rule = rule.clone().validate(); - - validate_candidate_map(&rule.left, g, candidate); - if !identification_condition(&rule, candidate) { + validate_candidate_map(rule, g, candidate); + if !identification_condition(rule, candidate) || !dangling_condition(rule, candidate, g) { return None; } - if !dangling_condition(&rule, candidate, g) { - return None; + let exploded = exploded_context(g, rule, candidate); + Some(exploded) +} + +fn exploded_context( + g: &Hypergraph, + rule: &LaxSpan, + candidate: &NodeEdgeMap, +) -> Hypergraph { + let mut in_image_nodes = vec![false; g.nodes.len()]; + for i in 0..candidate.nodes.source() { + let idx = candidate.nodes.table[i]; + in_image_nodes[idx] = true; + } + + let mut in_image_edges = vec![false; g.edges.len()]; + for i in 0..candidate.edges.source() { + let idx = candidate.edges.table[i]; + in_image_edges[idx] = true; + } + + let mut h = Hypergraph::empty(); + let mut node_map: Vec> = vec![None; g.nodes.len()]; + for (idx, label) in g.nodes.iter().enumerate() { + if in_image_nodes[idx] { + continue; + } + let new_id = h.new_node(label.clone()); + node_map[idx] = Some(new_id.0); } - Some(g.clone()) + for (edge_id, edge) in g.adjacency.iter().enumerate() { + if in_image_edges[edge_id] { + continue; + } + + let mut sources = Vec::with_capacity(edge.sources.len()); + for node in &edge.sources { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => h.new_node(g.nodes[node.0].clone()), + }; + sources.push(new_id); + } + + let mut targets = Vec::with_capacity(edge.targets.len()); + for node in &edge.targets { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => h.new_node(g.nodes[node.0].clone()), + }; + targets.push(new_id); + } + + h.new_edge(g.edges[edge_id].clone(), Hyperedge { sources, targets }); + } + + h.coproduct(&rule.apex) } fn validate_candidate_map( - left: &Hypergraph, + rule: &LaxSpan, g: &Hypergraph, candidate: &NodeEdgeMap, ) { - if candidate.nodes.source() != left.nodes.len() { + if candidate.nodes.source() != rule.left.nodes.len() { panic!( "candidate map node source size mismatch: got {}, expected {}", candidate.nodes.source(), - left.nodes.len() + rule.left.nodes.len() ); } if candidate.nodes.target() != g.nodes.len() { @@ -38,11 +89,11 @@ fn validate_candidate_map( g.nodes.len() ); } - if candidate.edges.source() != left.edges.len() { + if candidate.edges.source() != rule.left.edges.len() { panic!( "candidate map edge source size mismatch: got {}, expected {}", candidate.edges.source(), - left.edges.len() + rule.left.edges.len() ); } if candidate.edges.target() != g.edges.len() { From 10f204f7f6704652defbfe669681d3a8101abe51 Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 13:11:42 +0100 Subject: [PATCH 06/41] add test --- src/lax/rewrite.rs | 144 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index e863ff7..f53b3e9 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -172,3 +172,147 @@ fn dangling_condition( true } + +#[cfg(test)] +mod tests { + // Tests from working examples in + // Bonchi, Filippo, et al. + // "String diagram rewrite theory I: Rewriting with Frobenius structure." + // Journal of the ACM (JACM) 69.2 (2022): 1-58. 
+ use super::exploded_context; + use crate::array::vec::{VecArray, VecKind}; + use crate::finite_function::FiniteFunction; + use crate::lax::{Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap}; + + fn empty_map(target: usize) -> FiniteFunction { + FiniteFunction::::new(VecArray(vec![]), target).unwrap() + } + + #[test] + fn test_exploded_context_construction() { + // From Session 4.5 Pushout Complements and Rewriting Modulo Frobenius + let f_label = "f".to_string(); + let g_label = "g".to_string(); + + let mut g: Hypergraph = Hypergraph::empty(); + let w1 = g.new_node("w1".to_string()); + let w2 = g.new_node("w2".to_string()); + let w3 = g.new_node("w3".to_string()); + let w4 = g.new_node("w4".to_string()); + let w5 = g.new_node("w5".to_string()); + + g.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + g.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + g.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w4], + }, + ); + g.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w4], + targets: vec![w5], + }, + ); + + let mut left: Hypergraph = Hypergraph::empty(); + left.new_node("a0".to_string()); + + let mut right: Hypergraph = Hypergraph::empty(); + let b0 = right.new_node("b0".to_string()); + let b2 = right.new_node("b2".to_string()); + let b1 = right.new_node("b1".to_string()); + right.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![b0], + targets: vec![b2], + }, + ); + right.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![b2], + targets: vec![b1], + }, + ); + + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node("k0".to_string()); + apex.new_node("k1".to_string()); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0]), left.nodes.len()).unwrap(), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 2]), right.nodes.len()).unwrap(), + edges: empty_map(right.edges.len()), + }; + let rule = LaxSpan::new(apex, left, right, left_map, right_map); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![3]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + let exploded = exploded_context(&g, &rule, &candidate); + + let mut expected: Hypergraph = Hypergraph::empty(); + let e_w1 = expected.new_node("w1".to_string()); + let e_w2 = expected.new_node("w2".to_string()); + let e_w3 = expected.new_node("w3".to_string()); + let e_w5 = expected.new_node("w5".to_string()); + let e_w4a = expected.new_node("w4".to_string()); + let e_w4b = expected.new_node("w4".to_string()); + + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w2], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w2], + targets: vec![e_w3], + }, + ); + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w4a], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w4b], + targets: vec![e_w5], + }, + ); + + expected.new_node("k0".to_string()); + expected.new_node("k1".to_string()); + + assert_eq!(exploded, expected); + } +} From a5736a32ac2611c0d3b230b80a55357015a8e4d8 Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 16:23:10 +0100 Subject: [PATCH 07/41] compute q --- src/lax/rewrite.rs | 56 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 49 insertions(+), 7 deletions(-) diff 
--git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index f53b3e9..57caa3f 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,4 +1,11 @@ -use crate::lax::{Arrow, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; +use crate::array::vec::{VecArray, VecKind}; +use crate::finite_function::FiniteFunction; +use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; + +struct ExplodedContext { + graph: Hypergraph, + map: NodeEdgeMap, +} /// Rewrite a lax hypergraph using a rule span and candidate map. pub fn rewrite( @@ -11,14 +18,14 @@ pub fn rewrite( return None; } let exploded = exploded_context(g, rule, candidate); - Some(exploded) + Some(exploded.graph) } fn exploded_context( g: &Hypergraph, rule: &LaxSpan, candidate: &NodeEdgeMap, -) -> Hypergraph { +) -> ExplodedContext { let mut in_image_nodes = vec![false; g.nodes.len()]; for i in 0..candidate.nodes.source() { let idx = candidate.nodes.table[i]; @@ -32,15 +39,18 @@ fn exploded_context( } let mut h = Hypergraph::empty(); + let mut h_node_to_g = Vec::new(); let mut node_map: Vec> = vec![None; g.nodes.len()]; for (idx, label) in g.nodes.iter().enumerate() { if in_image_nodes[idx] { continue; } let new_id = h.new_node(label.clone()); + h_node_to_g.push(idx); node_map[idx] = Some(new_id.0); } + let mut h_edge_to_g = Vec::new(); for (edge_id, edge) in g.adjacency.iter().enumerate() { if in_image_edges[edge_id] { continue; @@ -50,7 +60,11 @@ fn exploded_context( for node in &edge.sources { let new_id = match node_map[node.0] { Some(existing) => NodeId(existing), - None => h.new_node(g.nodes[node.0].clone()), + None => { + let new_id = h.new_node(g.nodes[node.0].clone()); + h_node_to_g.push(node.0); + new_id + } }; sources.push(new_id); } @@ -59,15 +73,43 @@ fn exploded_context( for node in &edge.targets { let new_id = match node_map[node.0] { Some(existing) => NodeId(existing), - None => h.new_node(g.nodes[node.0].clone()), + None => { + let new_id = h.new_node(g.nodes[node.0].clone()); + h_node_to_g.push(node.0); + new_id + } }; targets.push(new_id); } h.new_edge(g.edges[edge_id].clone(), Hyperedge { sources, targets }); + h_edge_to_g.push(edge_id); } - h.coproduct(&rule.apex) + let q_h_nodes = + FiniteFunction::::new(VecArray(h_node_to_g), g.nodes.len()).unwrap(); + let q_h_edges = + FiniteFunction::::new(VecArray(h_edge_to_g), g.edges.len()).unwrap(); + let q_k_nodes = rule + .left_map + .nodes + .compose(&candidate.nodes) + .expect("candidate map left nodes compose"); + let q_k_edges = rule + .left_map + .edges + .compose(&candidate.edges) + .expect("candidate map left edges compose"); + + let q = NodeEdgeMap { + nodes: q_h_nodes.coproduct(&q_k_nodes).expect("node coproduct"), + edges: q_h_edges.coproduct(&q_k_edges).expect("edge coproduct"), + }; + + ExplodedContext { + graph: h.coproduct(&rule.apex), + map: q, + } } fn validate_candidate_map( @@ -313,6 +355,6 @@ mod tests { expected.new_node("k0".to_string()); expected.new_node("k1".to_string()); - assert_eq!(exploded, expected); + assert_eq!(exploded.graph, expected); } } From d91619d5a28f458fb231ea030e87e536349bff0a Mon Sep 17 00:00:00 2001 From: mstn Date: Sat, 10 Jan 2026 17:20:31 +0100 Subject: [PATCH 08/41] compute fibers --- src/lax/rewrite.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 57caa3f..b80f81c 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -5,6 +5,7 @@ use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, struct ExplodedContext { 
graph: Hypergraph, map: NodeEdgeMap, + node_fibers: Vec>, } /// Rewrite a lax hypergraph using a rule span and candidate map. @@ -106,9 +107,15 @@ fn exploded_context( edges: q_h_edges.coproduct(&q_k_edges).expect("edge coproduct"), }; + let mut node_fibers = vec![Vec::new(); g.nodes.len()]; + for (src, &tgt) in q.nodes.table.iter().enumerate() { + node_fibers[tgt].push(NodeId(src)); + } + ExplodedContext { graph: h.coproduct(&rule.apex), map: q, + node_fibers, } } From 78571a4abb4219c41c3eaba6d26c91dd7dcaf447 Mon Sep 17 00:00:00 2001 From: mstn Date: Sun, 11 Jan 2026 13:15:57 +0100 Subject: [PATCH 09/41] wip: enumerate partitions --- src/lax/rewrite.rs | 458 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 400 insertions(+), 58 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index b80f81c..0baecd5 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -4,8 +4,8 @@ use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, struct ExplodedContext { graph: Hypergraph, - map: NodeEdgeMap, - node_fibers: Vec>, + to_g: NodeEdgeMap, + to_copied_plus_left: NodeEdgeMap, } /// Rewrite a lax hypergraph using a rule span and candidate map. @@ -19,6 +19,7 @@ pub fn rewrite( return None; } let exploded = exploded_context(g, rule, candidate); + let _fiber_inputs = fiber_partition_inputs(&exploded); Some(exploded.graph) } @@ -87,10 +88,8 @@ fn exploded_context( h_edge_to_g.push(edge_id); } - let q_h_nodes = - FiniteFunction::::new(VecArray(h_node_to_g), g.nodes.len()).unwrap(); - let q_h_edges = - FiniteFunction::::new(VecArray(h_edge_to_g), g.edges.len()).unwrap(); + let q_h_nodes = FiniteFunction::::new(VecArray(h_node_to_g), g.nodes.len()).unwrap(); + let q_h_edges = FiniteFunction::::new(VecArray(h_edge_to_g), g.edges.len()).unwrap(); let q_k_nodes = rule .left_map .nodes @@ -102,20 +101,241 @@ fn exploded_context( .compose(&candidate.edges) .expect("candidate map left edges compose"); - let q = NodeEdgeMap { + let to_g = NodeEdgeMap { nodes: q_h_nodes.coproduct(&q_k_nodes).expect("node coproduct"), edges: q_h_edges.coproduct(&q_k_edges).expect("edge coproduct"), }; - let mut node_fibers = vec![Vec::new(); g.nodes.len()]; - for (src, &tgt) in q.nodes.table.iter().enumerate() { - node_fibers[tgt].push(NodeId(src)); - } + let copied_nodes = h.nodes.len(); + let copied_edges = h.edges.len(); + let left_nodes = rule.left.nodes.len(); + let left_edges = rule.left.edges.len(); + let to_copied_plus_left = NodeEdgeMap { + nodes: FiniteFunction::::identity(copied_nodes) + .inject0(left_nodes) + .coproduct(&rule.left_map.nodes.inject1(copied_nodes)) + .expect("coproduct id + left nodes"), + edges: FiniteFunction::::identity(copied_edges) + .inject0(left_edges) + .coproduct(&rule.left_map.edges.inject1(copied_edges)) + .expect("coproduct id + left edges"), + }; ExplodedContext { graph: h.coproduct(&rule.apex), - map: q, - node_fibers, + to_g, + to_copied_plus_left, + } +} + +fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec { + let mut fibers = vec![Vec::new(); exploded.to_g.nodes.target()]; + for (src, &tgt) in exploded.to_g.nodes.table.iter().enumerate() { + fibers[tgt].push(NodeId(src)); + } + + fibers + .into_iter() + .filter(|nodes| !nodes.is_empty()) + .map(|nodes| { + // f' refines q, so f'-classes are contained within each q-fiber. 
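+ // Each fiber node is tagged with the index of its f'-class; the enumeration in
+ // `enumerate_fiber_partitions` uses these tags to keep only partitions whose
+ // blocks connect all of the fiber's classes (tracked by a union-find on classes).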
+ let mut class_index = vec![None; exploded.to_copied_plus_left.nodes.target()]; + let mut class_ids = Vec::with_capacity(nodes.len()); + let mut next_class = 0; + for node in &nodes { + let f_image = exploded.to_copied_plus_left.nodes.table[node.0]; + let id = match class_index[f_image] { + Some(existing) => existing, + None => { + let id = next_class; + next_class += 1; + class_index[f_image] = Some(id); + id + } + }; + class_ids.push(id); + } + + FiberPartitionInput { + nodes, + class_ids, + class_count: next_class, + } + }) + .collect() +} + +fn enumerate_fiber_partitions(fiber: &FiberPartitionInput) -> Vec { + let mut results = Vec::new(); + let mut blocks: Vec = Vec::new(); + let mut uf = UnionFind::new(fiber.class_count); + + fn all_connected(uf: &UnionFind) -> bool { + uf.components == 1 + } + + fn walk( + idx: usize, + fiber: &FiberPartitionInput, + blocks: &mut Vec, + uf: &mut UnionFind, + results: &mut Vec, + ) { + if idx == fiber.nodes.len() { + if all_connected(uf) { + let blocks = blocks + .iter() + .map(|b| FiberBlock { + nodes: b.nodes.clone(), + }) + .collect(); + results.push(FiberPartition { blocks }); + } + return; + } + + let node = fiber.nodes[idx]; + let class_id = fiber.class_ids[idx]; + + for i in 0..blocks.len() { + let snap = uf.snapshot(); + let (nodes_len, classes_len) = { + let block = &mut blocks[i]; + let nodes_len = block.nodes.len(); + let classes_len = block.classes.len(); + + block.nodes.push(node); + if !block.classes.contains(&class_id) { + if let Some(&rep) = block.classes.first() { + uf.union(rep, class_id); + } + block.classes.push(class_id); + } + + (nodes_len, classes_len) + }; + + walk(idx + 1, fiber, blocks, uf, results); + + uf.rollback(snap); + let block = &mut blocks[i]; + block.nodes.truncate(nodes_len); + block.classes.truncate(classes_len); + } + + blocks.push(BlockState { + nodes: vec![node], + classes: vec![class_id], + }); + walk(idx + 1, fiber, blocks, uf, results); + blocks.pop(); + } + + walk(0, fiber, &mut blocks, &mut uf, &mut results); + results +} + +struct FiberPartitionInput { + nodes: Vec, + class_ids: Vec, + class_count: usize, +} + +struct FiberPartition { + blocks: Vec, +} + +struct FiberBlock { + nodes: Vec, +} + +struct BlockState { + nodes: Vec, + classes: Vec, +} + +struct UnionFind { + parent: Vec, + size: Vec, + history: Vec, + components: usize, +} + +enum HistoryEntry { + Noop, + Merge { + root: usize, + parent: usize, + size_parent: usize, + }, +} + +impl UnionFind { + fn new(n: usize) -> Self { + Self { + parent: (0..n).collect(), + size: vec![1; n], + history: Vec::new(), + components: n, + } + } + + fn len(&self) -> usize { + self.parent.len() + } + + fn find(&mut self, x: usize) -> usize { + let mut node = x; + while self.parent[node] != node { + node = self.parent[node]; + } + node + } + + fn union(&mut self, x: usize, y: usize) { + let root_x = self.find(x); + let root_y = self.find(y); + if root_x == root_y { + self.history.push(HistoryEntry::Noop); + return; + } + + let (root, parent) = if self.size[root_x] >= self.size[root_y] { + (root_y, root_x) + } else { + (root_x, root_y) + }; + + self.history.push(HistoryEntry::Merge { + root, + parent, + size_parent: self.size[parent], + }); + + self.parent[root] = parent; + self.size[parent] += self.size[root]; + self.components -= 1; + } + + fn snapshot(&self) -> usize { + self.history.len() + } + + fn rollback(&mut self, snapshot: usize) { + while self.history.len() > snapshot { + match self.history.pop().expect("rollback history") { + HistoryEntry::Noop => {} + 
HistoryEntry::Merge { + root, + parent, + size_parent, + } => { + self.parent[root] = root; + self.size[parent] = size_parent; + self.components += 1; + } + } + } } } @@ -228,10 +448,13 @@ mod tests { // Bonchi, Filippo, et al. // "String diagram rewrite theory I: Rewriting with Frobenius structure." // Journal of the ACM (JACM) 69.2 (2022): 1-58. - use super::exploded_context; + use super::{ + enumerate_fiber_partitions, exploded_context, fiber_partition_inputs, FiberPartition, + }; use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; - use crate::lax::{Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap}; + use crate::lax::{Arrow, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; + use std::collections::HashMap; fn empty_map(target: usize) -> FiniteFunction { FiniteFunction::::new(VecArray(vec![]), target).unwrap() @@ -239,6 +462,167 @@ mod tests { #[test] fn test_exploded_context_construction() { + let (f_label, g_label, g, rule, candidate) = example_rewrite_input(); + let exploded = exploded_context(&g, &rule, &candidate); + + let mut expected: Hypergraph = Hypergraph::empty(); + let e_w1 = expected.new_node("w1".to_string()); + let e_w2 = expected.new_node("w2".to_string()); + let e_w3 = expected.new_node("w3".to_string()); + let e_w5 = expected.new_node("w5".to_string()); + let e_w4a = expected.new_node("w4".to_string()); + let e_w4b = expected.new_node("w4".to_string()); + + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w2], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w2], + targets: vec![e_w3], + }, + ); + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w4a], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w4b], + targets: vec![e_w5], + }, + ); + + expected.new_node("k0".to_string()); + expected.new_node("k1".to_string()); + + assert_eq!(exploded.graph, expected); + } + + #[test] + fn test_f_prime_refines_q() { + let (_f_label, _g_label, g, rule, candidate) = example_rewrite_input(); + let exploded = exploded_context(&g, &rule, &candidate); + + let mut f_prime_to_q = vec![None; exploded.to_copied_plus_left.nodes.target()]; + for (src, &f_prime_image) in exploded.to_copied_plus_left.nodes.table.iter().enumerate() { + let q_image = exploded.to_g.nodes.table[src]; + match f_prime_to_q[f_prime_image] { + Some(existing) => assert_eq!(existing, q_image), + None => f_prime_to_q[f_prime_image] = Some(q_image), + } + } + } + + #[test] + fn test_fiber_partitions_expected_blocks() { + let (_f_label, _g_label, g, rule, candidate) = example_rewrite_input(); + let exploded = exploded_context(&g, &rule, &candidate); + let fibers = fiber_partition_inputs(&exploded); + + let target_fiber = fibers + .iter() + .find(|fiber| { + let mut has_k0 = false; + let mut has_k1 = false; + for node in &fiber.nodes { + let label = &exploded.graph.nodes[node.0]; + has_k0 |= label == "k0"; + has_k1 |= label == "k1"; + } + has_k0 && has_k1 + }) + .expect("expected fiber containing k0 and k1"); + + let mut w4_nodes: Vec = target_fiber + .nodes + .iter() + .cloned() + .filter(|node| exploded.graph.nodes[node.0] == "w4") + .collect(); + w4_nodes.sort_by_key(|node| node.0); + + let mut name_map: HashMap = HashMap::new(); + name_map.insert(w4_nodes[0], "a0"); + name_map.insert(w4_nodes[1], "a1"); + for node in &target_fiber.nodes { + let label = &exploded.graph.nodes[node.0]; + if label == "k0" { + name_map.insert(*node, "k0"); 
+ } else if label == "k1" { + name_map.insert(*node, "k1"); + } + } + + let partitions = enumerate_fiber_partitions(target_fiber); + let mut actual = partitions + .iter() + .map(|partition| normalize_partition(partition, &name_map)) + .collect::>(); + actual.sort(); + + let mut expected = vec![ + vec![vec!["a0", "k0"], vec!["a1", "k1"]], + vec![vec!["a0", "k1"], vec!["a1", "k0"]], + vec![vec!["k0"], vec!["a0", "a1", "k1"]], + vec![vec!["k1"], vec!["a0", "a1", "k0"]], + vec![vec!["a0", "a1", "k0", "k1"]], + ] + .into_iter() + .map(|blocks| { + let mut normalized = blocks + .into_iter() + .map(|mut block| { + block.sort(); + block + }) + .collect::>(); + normalized.sort(); + normalized + }) + .collect::>(); + expected.sort(); + + assert_eq!(actual, expected); + } + + fn normalize_partition( + partition: &FiberPartition, + name_map: &HashMap, + ) -> Vec> { + let mut blocks = partition + .blocks + .iter() + .map(|block| { + let mut names = block + .nodes + .iter() + .map(|node| *name_map.get(node).expect("name map")) + .collect::>(); + names.sort(); + names + }) + .collect::>(); + blocks.sort(); + blocks + } + + fn example_rewrite_input() -> ( + String, + String, + Hypergraph, + LaxSpan, + NodeEdgeMap, + ) { // From Session 4.5 Pushout Complements and Rewriting Modulo Frobenius let f_label = "f".to_string(); let g_label = "g".to_string(); @@ -320,48 +704,6 @@ mod tests { edges: empty_map(g.edges.len()), }; - let exploded = exploded_context(&g, &rule, &candidate); - - let mut expected: Hypergraph = Hypergraph::empty(); - let e_w1 = expected.new_node("w1".to_string()); - let e_w2 = expected.new_node("w2".to_string()); - let e_w3 = expected.new_node("w3".to_string()); - let e_w5 = expected.new_node("w5".to_string()); - let e_w4a = expected.new_node("w4".to_string()); - let e_w4b = expected.new_node("w4".to_string()); - - expected.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![e_w1], - targets: vec![e_w2], - }, - ); - expected.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![e_w2], - targets: vec![e_w3], - }, - ); - expected.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![e_w1], - targets: vec![e_w4a], - }, - ); - expected.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![e_w4b], - targets: vec![e_w5], - }, - ); - - expected.new_node("k0".to_string()); - expected.new_node("k1".to_string()); - - assert_eq!(exploded.graph, expected); + (f_label, g_label, g, rule, candidate) } } From fd34c2e6a8e29be18b0bbb640654356e58b8e4a7 Mon Sep 17 00:00:00 2001 From: mstn Date: Sun, 11 Jan 2026 17:51:12 +0100 Subject: [PATCH 10/41] compute pushout complements --- src/lax/rewrite.rs | 382 ++++++++++++++++++++++++++++++++++++++----- tests/lax/rewrite.rs | 3 +- 2 files changed, 341 insertions(+), 44 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 0baecd5..611bb0f 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -13,14 +13,90 @@ pub fn rewrite( g: &Hypergraph, rule: &LaxSpan, candidate: &NodeEdgeMap, -) -> Option> { +) -> Option>> { validate_candidate_map(rule, g, candidate); if !identification_condition(rule, candidate) || !dangling_condition(rule, candidate, g) { return None; } let exploded = exploded_context(g, rule, candidate); - let _fiber_inputs = fiber_partition_inputs(&exploded); - Some(exploded.graph) + let fiber_inputs = fiber_partition_inputs(&exploded); + let partitions_per_fiber: Vec> = fiber_inputs + .iter() + .map(enumerate_fiber_partitions) + .collect(); + + if partitions_per_fiber.is_empty() { + return 
Some(vec![exploded.graph]); + } + + let mut complements = Vec::new(); + let mut selection: Vec = Vec::with_capacity(partitions_per_fiber.len()); + + fn pushout_complement( + exploded: &ExplodedContext, + partitions_per_fiber: &[Vec], + selection: &[usize], + ) -> Hypergraph { + let mut quotient_left = Vec::new(); + let mut quotient_right = Vec::new(); + + for (fiber_idx, &partition_idx) in selection.iter().enumerate() { + let partition = &partitions_per_fiber[fiber_idx][partition_idx]; + for block in &partition.blocks { + let Some((first, rest)) = block.nodes.split_first() else { + continue; + }; + for node in rest { + quotient_left.push(*first); + quotient_right.push(*node); + } + } + } + + let mut complement = exploded.graph.clone(); + complement.quotient = (quotient_left, quotient_right); + complement.quotient(); + complement + } + + fn walk( + idx: usize, + exploded: &ExplodedContext, + partitions_per_fiber: &[Vec], + selection: &mut Vec, + complements: &mut Vec>, + ) { + if idx == partitions_per_fiber.len() { + complements.push(pushout_complement( + exploded, + partitions_per_fiber, + selection, + )); + return; + } + + for partition_idx in 0..partitions_per_fiber[idx].len() { + selection.push(partition_idx); + walk( + idx + 1, + exploded, + partitions_per_fiber, + selection, + complements, + ); + selection.pop(); + } + } + + walk( + 0, + &exploded, + &partitions_per_fiber, + &mut selection, + &mut complements, + ); + + Some(complements) } fn exploded_context( @@ -449,7 +525,8 @@ mod tests { // "String diagram rewrite theory I: Rewriting with Frobenius structure." // Journal of the ACM (JACM) 69.2 (2022): 1-58. use super::{ - enumerate_fiber_partitions, exploded_context, fiber_partition_inputs, FiberPartition, + enumerate_fiber_partitions, exploded_context, fiber_partition_inputs, rewrite, + FiberPartition, }; use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; @@ -466,12 +543,12 @@ mod tests { let exploded = exploded_context(&g, &rule, &candidate); let mut expected: Hypergraph = Hypergraph::empty(); - let e_w1 = expected.new_node("w1".to_string()); - let e_w2 = expected.new_node("w2".to_string()); - let e_w3 = expected.new_node("w3".to_string()); - let e_w5 = expected.new_node("w5".to_string()); - let e_w4a = expected.new_node("w4".to_string()); - let e_w4b = expected.new_node("w4".to_string()); + let e_w1 = expected.new_node("w".to_string()); + let e_w2 = expected.new_node("w".to_string()); + let e_w3 = expected.new_node("w".to_string()); + let e_w5 = expected.new_node("w".to_string()); + let e_w4a = expected.new_node("w".to_string()); + let e_w4b = expected.new_node("w".to_string()); expected.new_edge( f_label.clone(), @@ -502,8 +579,8 @@ mod tests { }, ); - expected.new_node("k0".to_string()); - expected.new_node("k1".to_string()); + expected.new_node("w".to_string()); + expected.new_node("w".to_string()); assert_eq!(exploded.graph, expected); } @@ -529,39 +606,40 @@ mod tests { let exploded = exploded_context(&g, &rule, &candidate); let fibers = fiber_partition_inputs(&exploded); + let copied_nodes = exploded.graph.nodes.len() - rule.apex.nodes.len(); let target_fiber = fibers .iter() .find(|fiber| { - let mut has_k0 = false; - let mut has_k1 = false; - for node in &fiber.nodes { - let label = &exploded.graph.nodes[node.0]; - has_k0 |= label == "k0"; - has_k1 |= label == "k1"; - } - has_k0 && has_k1 + let apex_count = fiber + .nodes + .iter() + .filter(|node| node.0 >= copied_nodes) + .count(); + apex_count == rule.apex.nodes.len() }) - 
.expect("expected fiber containing k0 and k1"); + .expect("expected fiber containing apex nodes"); - let mut w4_nodes: Vec = target_fiber + let mut apex_nodes = target_fiber .nodes .iter() .cloned() - .filter(|node| exploded.graph.nodes[node.0] == "w4") - .collect(); + .filter(|node| node.0 >= copied_nodes) + .collect::>(); + apex_nodes.sort_by_key(|node| node.0); + + let mut w4_nodes = target_fiber + .nodes + .iter() + .cloned() + .filter(|node| node.0 < copied_nodes) + .collect::>(); w4_nodes.sort_by_key(|node| node.0); let mut name_map: HashMap = HashMap::new(); name_map.insert(w4_nodes[0], "a0"); name_map.insert(w4_nodes[1], "a1"); - for node in &target_fiber.nodes { - let label = &exploded.graph.nodes[node.0]; - if label == "k0" { - name_map.insert(*node, "k0"); - } else if label == "k1" { - name_map.insert(*node, "k1"); - } - } + name_map.insert(apex_nodes[0], "k0"); + name_map.insert(apex_nodes[1], "k1"); let partitions = enumerate_fiber_partitions(target_fiber); let mut actual = partitions @@ -595,6 +673,224 @@ mod tests { assert_eq!(actual, expected); } + #[test] + fn test_rewrite_complements_working_example() { + let (f_label, g_label, g, rule, candidate) = example_rewrite_input(); + + let mut expected = Vec::new(); + + let mut h1: Hypergraph = Hypergraph::empty(); + let w1 = h1.new_node("w".to_string()); + let w2 = h1.new_node("w".to_string()); + let w3 = h1.new_node("w".to_string()); + let w5 = h1.new_node("w".to_string()); + let a0 = h1.new_node("w".to_string()); + let a1 = h1.new_node("w".to_string()); + let k0 = h1.new_node("w".to_string()); + let k1 = h1.new_node("w".to_string()); + h1.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + h1.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + h1.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![a0], + }, + ); + h1.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![a1], + targets: vec![w5], + }, + ); + h1.quotient = (vec![a1, a0], vec![k0, k1]); + h1.quotient(); + expected.push(h1); + + let mut h2: Hypergraph = Hypergraph::empty(); + let w1 = h2.new_node("w".to_string()); + let w2 = h2.new_node("w".to_string()); + let w3 = h2.new_node("w".to_string()); + let w5 = h2.new_node("w".to_string()); + let a0 = h2.new_node("w".to_string()); + let a1 = h2.new_node("w".to_string()); + let k0 = h2.new_node("w".to_string()); + let k1 = h2.new_node("w".to_string()); + h2.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + h2.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + h2.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![a0], + }, + ); + h2.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![a1], + targets: vec![w5], + }, + ); + h2.quotient = (vec![a1, a0], vec![k1, k0]); + h2.quotient(); + expected.push(h2); + + let mut h3: Hypergraph = Hypergraph::empty(); + let w1 = h3.new_node("w".to_string()); + let w2 = h3.new_node("w".to_string()); + let w3 = h3.new_node("w".to_string()); + let w5 = h3.new_node("w".to_string()); + let a0 = h3.new_node("w".to_string()); + let a1 = h3.new_node("w".to_string()); + let k0 = h3.new_node("w".to_string()); + let k1 = h3.new_node("w".to_string()); + h3.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + h3.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + 
h3.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![a0], + }, + ); + h3.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![a1], + targets: vec![w5], + }, + ); + h3.quotient = (vec![a0, a0], vec![a1, k1]); + h3.quotient(); + expected.push(h3); + + let mut h4: Hypergraph = Hypergraph::empty(); + let w1 = h4.new_node("w".to_string()); + let w2 = h4.new_node("w".to_string()); + let w3 = h4.new_node("w".to_string()); + let w5 = h4.new_node("w".to_string()); + let a0 = h4.new_node("w".to_string()); + let a1 = h4.new_node("w".to_string()); + let k0 = h4.new_node("w".to_string()); + let k1 = h4.new_node("w".to_string()); + h4.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + h4.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + h4.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![a0], + }, + ); + h4.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![a1], + targets: vec![w5], + }, + ); + h4.quotient = (vec![a0, a0], vec![a1, k0]); + h4.quotient(); + expected.push(h4); + + let mut h5: Hypergraph = Hypergraph::empty(); + let w1 = h5.new_node("w".to_string()); + let w2 = h5.new_node("w".to_string()); + let w3 = h5.new_node("w".to_string()); + let w5 = h5.new_node("w".to_string()); + let a0 = h5.new_node("w".to_string()); + let a1 = h5.new_node("w".to_string()); + let k0 = h5.new_node("w".to_string()); + let k1 = h5.new_node("w".to_string()); + h5.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + h5.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + h5.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![a0], + }, + ); + h5.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![a1], + targets: vec![w5], + }, + ); + h5.quotient = (vec![a0, a0, a0], vec![a1, k0, k1]); + h5.quotient(); + expected.push(h5); + + let complements = rewrite(&g, &rule, &candidate).expect("expected complements"); + assert_eq!(complements.len(), 5); + for expected_graph in expected { + assert!(complements.iter().any(|h| h == &expected_graph)); + } + } + fn normalize_partition( partition: &FiberPartition, name_map: &HashMap, @@ -628,11 +924,11 @@ mod tests { let g_label = "g".to_string(); let mut g: Hypergraph = Hypergraph::empty(); - let w1 = g.new_node("w1".to_string()); - let w2 = g.new_node("w2".to_string()); - let w3 = g.new_node("w3".to_string()); - let w4 = g.new_node("w4".to_string()); - let w5 = g.new_node("w5".to_string()); + let w1 = g.new_node("w".to_string()); + let w2 = g.new_node("w".to_string()); + let w3 = g.new_node("w".to_string()); + let w4 = g.new_node("w".to_string()); + let w5 = g.new_node("w".to_string()); g.new_edge( f_label.clone(), @@ -664,12 +960,12 @@ mod tests { ); let mut left: Hypergraph = Hypergraph::empty(); - left.new_node("a0".to_string()); + left.new_node("w".to_string()); let mut right: Hypergraph = Hypergraph::empty(); - let b0 = right.new_node("b0".to_string()); - let b2 = right.new_node("b2".to_string()); - let b1 = right.new_node("b1".to_string()); + let b0 = right.new_node("w".to_string()); + let b2 = right.new_node("w".to_string()); + let b1 = right.new_node("w".to_string()); right.new_edge( f_label.clone(), Hyperedge { @@ -686,8 +982,8 @@ mod tests { ); let mut apex: Hypergraph = Hypergraph::empty(); - apex.new_node("k0".to_string()); - apex.new_node("k1".to_string()); + 
apex.new_node("w".to_string()); + apex.new_node("w".to_string()); let left_map = NodeEdgeMap { nodes: FiniteFunction::::new(VecArray(vec![0, 0]), left.nodes.len()).unwrap(), diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index b537881..d202698 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -87,5 +87,6 @@ fn test_rewrite_gluing_ok() { edges: empty_map(g.edges.len()), }; - assert!(rewrite(&g, &rule, &candidate).is_some()); + let complements = rewrite(&g, &rule, &candidate).expect("expected complements"); + assert!(!complements.is_empty()); } From c636567c9fc847910f7a2e1e9e24cab2d6b3e15c Mon Sep 17 00:00:00 2001 From: mstn Date: Sun, 11 Jan 2026 18:54:49 +0100 Subject: [PATCH 11/41] compute pushout --- src/lax/hypergraph.rs | 30 +++++ src/lax/rewrite.rs | 273 +++++++++++++++++++++++++++--------------- tests/lax/rewrite.rs | 6 +- 3 files changed, 208 insertions(+), 101 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 58ee778..b8adeb3 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -56,6 +56,12 @@ impl LaxSpan { /// Validate that maps are compatible with their hypergraphs. pub fn validate(self) -> Self { + if !self.apex.edges.is_empty() { + panic!( + "apex must be discrete (no edges), got {} edge(s)", + self.apex.edges.len() + ); + } if self.left_map.nodes.source() != self.apex.nodes.len() { panic!( "left map node source size mismatch: got {}, expected {}", @@ -115,6 +121,30 @@ impl LaxSpan { self } + + /// Compute the pushout of the span, identifying only nodes. + /// + /// NOTE: this assumes the apex is discrete (no edges), so edge identifications are ignored. + pub fn pushout(&self) -> Hypergraph + where + O: Clone + PartialEq, + A: Clone + PartialEq, + { + debug_assert!( + self.apex.edges.is_empty(), + "pushout assumes discrete apex (no edge identifications)" + ); + + let mut pushout = self.left.coproduct(&self.right); + let left_nodes = self.left.nodes.len(); + for (k_idx, &l_idx) in self.left_map.nodes.table.iter().enumerate() { + let r_idx = self.right_map.nodes.table[k_idx] + left_nodes; + pushout.quotient.0.push(NodeId(l_idx)); + pushout.quotient.1.push(NodeId(r_idx)); + } + pushout.quotient(); + pushout + } } /// Create a [`Hyperedge`] from a tuple of `(sources, targets)`. 
diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 611bb0f..03fff6f 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -13,10 +13,10 @@ pub fn rewrite( g: &Hypergraph, rule: &LaxSpan, candidate: &NodeEdgeMap, -) -> Option>> { +) -> Vec> { validate_candidate_map(rule, g, candidate); if !identification_condition(rule, candidate) || !dangling_condition(rule, candidate, g) { - return None; + return Vec::new(); } let exploded = exploded_context(g, rule, candidate); let fiber_inputs = fiber_partition_inputs(&exploded); @@ -26,14 +26,20 @@ pub fn rewrite( .collect(); if partitions_per_fiber.is_empty() { - return Some(vec![exploded.graph]); + return vec![pushout_result( + &exploded, + rule, + &partitions_per_fiber, + &[], + )]; } let mut complements = Vec::new(); let mut selection: Vec = Vec::with_capacity(partitions_per_fiber.len()); - fn pushout_complement( + fn pushout_result( exploded: &ExplodedContext, + rule: &LaxSpan, partitions_per_fiber: &[Vec], selection: &[usize], ) -> Hypergraph { @@ -55,20 +61,45 @@ pub fn rewrite( let mut complement = exploded.graph.clone(); complement.quotient = (quotient_left, quotient_right); - complement.quotient(); - complement + let q = complement.quotient(); + + let copied_nodes = exploded.graph.nodes.len() - rule.apex.nodes.len(); + let k_to_c_nodes = FiniteFunction::::new( + VecArray( + (0..rule.apex.nodes.len()) + .map(|idx| q.table[copied_nodes + idx]) + .collect(), + ), + complement.nodes.len(), + ) + .expect("k to complement map"); + let k_to_c = NodeEdgeMap { + nodes: k_to_c_nodes, + edges: FiniteFunction::::initial(complement.edges.len()), + }; + + let span = LaxSpan::new( + rule.apex.clone(), + rule.right.clone(), + complement, + rule.right_map.clone(), + k_to_c, + ); + span.pushout() } - fn walk( + fn walk( idx: usize, exploded: &ExplodedContext, + rule: &LaxSpan, partitions_per_fiber: &[Vec], selection: &mut Vec, - complements: &mut Vec>, + results: &mut Vec>, ) { if idx == partitions_per_fiber.len() { - complements.push(pushout_complement( + results.push(pushout_result( exploded, + rule, partitions_per_fiber, selection, )); @@ -80,9 +111,10 @@ pub fn rewrite( walk( idx + 1, exploded, + rule, partitions_per_fiber, selection, - complements, + results, ); selection.pop(); } @@ -91,12 +123,13 @@ pub fn rewrite( walk( 0, &exploded, + rule, &partitions_per_fiber, &mut selection, &mut complements, ); - Some(complements) + complements } fn exploded_context( @@ -676,215 +709,259 @@ mod tests { #[test] fn test_rewrite_complements_working_example() { let (f_label, g_label, g, rule, candidate) = example_rewrite_input(); - let mut expected = Vec::new(); - let mut h1: Hypergraph = Hypergraph::empty(); - let w1 = h1.new_node("w".to_string()); - let w2 = h1.new_node("w".to_string()); - let w3 = h1.new_node("w".to_string()); - let w5 = h1.new_node("w".to_string()); - let a0 = h1.new_node("w".to_string()); - let a1 = h1.new_node("w".to_string()); - let k0 = h1.new_node("w".to_string()); - let k1 = h1.new_node("w".to_string()); - h1.new_edge( + let mut c1: Hypergraph = Hypergraph::empty(); + let w1 = c1.new_node("w".to_string()); + let w2 = c1.new_node("w".to_string()); + let w3 = c1.new_node("w".to_string()); + let w5 = c1.new_node("w".to_string()); + let a0 = c1.new_node("w".to_string()); + let a1 = c1.new_node("w".to_string()); + let k0 = c1.new_node("w".to_string()); + let k1 = c1.new_node("w".to_string()); + c1.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![w2], }, ); - h1.new_edge( + c1.new_edge( g_label.clone(), 
Hyperedge { sources: vec![w2], targets: vec![w3], }, ); - h1.new_edge( + c1.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![a0], }, ); - h1.new_edge( + c1.new_edge( g_label.clone(), Hyperedge { sources: vec![a1], targets: vec![w5], }, ); - h1.quotient = (vec![a1, a0], vec![k0, k1]); - h1.quotient(); - expected.push(h1); - - let mut h2: Hypergraph = Hypergraph::empty(); - let w1 = h2.new_node("w".to_string()); - let w2 = h2.new_node("w".to_string()); - let w3 = h2.new_node("w".to_string()); - let w5 = h2.new_node("w".to_string()); - let a0 = h2.new_node("w".to_string()); - let a1 = h2.new_node("w".to_string()); - let k0 = h2.new_node("w".to_string()); - let k1 = h2.new_node("w".to_string()); - h2.new_edge( + c1.quotient = (vec![a1, a0], vec![k0, k1]); + let q1 = c1.quotient(); + let k0_c1 = NodeId(q1.table[k0.0]); + let k1_c1 = NodeId(q1.table[k1.0]); + let mut p1 = rule.right.coproduct(&c1); + let left_nodes = rule.right.nodes.len(); + p1.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); + p1.quotient.1.push(NodeId(k0_c1.0 + left_nodes)); + p1.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); + p1.quotient.1.push(NodeId(k1_c1.0 + left_nodes)); + p1.quotient(); + expected.push(p1); + + let mut c2: Hypergraph = Hypergraph::empty(); + let w1 = c2.new_node("w".to_string()); + let w2 = c2.new_node("w".to_string()); + let w3 = c2.new_node("w".to_string()); + let w5 = c2.new_node("w".to_string()); + let a0 = c2.new_node("w".to_string()); + let a1 = c2.new_node("w".to_string()); + let k0 = c2.new_node("w".to_string()); + let k1 = c2.new_node("w".to_string()); + c2.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![w2], }, ); - h2.new_edge( + c2.new_edge( g_label.clone(), Hyperedge { sources: vec![w2], targets: vec![w3], }, ); - h2.new_edge( + c2.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![a0], }, ); - h2.new_edge( + c2.new_edge( g_label.clone(), Hyperedge { sources: vec![a1], targets: vec![w5], }, ); - h2.quotient = (vec![a1, a0], vec![k1, k0]); - h2.quotient(); - expected.push(h2); - - let mut h3: Hypergraph = Hypergraph::empty(); - let w1 = h3.new_node("w".to_string()); - let w2 = h3.new_node("w".to_string()); - let w3 = h3.new_node("w".to_string()); - let w5 = h3.new_node("w".to_string()); - let a0 = h3.new_node("w".to_string()); - let a1 = h3.new_node("w".to_string()); - let k0 = h3.new_node("w".to_string()); - let k1 = h3.new_node("w".to_string()); - h3.new_edge( + c2.quotient = (vec![a1, a0], vec![k1, k0]); + let q2 = c2.quotient(); + let k0_c2 = NodeId(q2.table[k0.0]); + let k1_c2 = NodeId(q2.table[k1.0]); + let mut p2 = rule.right.coproduct(&c2); + let left_nodes = rule.right.nodes.len(); + p2.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); + p2.quotient.1.push(NodeId(k0_c2.0 + left_nodes)); + p2.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); + p2.quotient.1.push(NodeId(k1_c2.0 + left_nodes)); + p2.quotient(); + expected.push(p2); + + let mut c3: Hypergraph = Hypergraph::empty(); + let w1 = c3.new_node("w".to_string()); + let w2 = c3.new_node("w".to_string()); + let w3 = c3.new_node("w".to_string()); + let w5 = c3.new_node("w".to_string()); + let a0 = c3.new_node("w".to_string()); + let a1 = c3.new_node("w".to_string()); + let k0 = c3.new_node("w".to_string()); + let k1 = c3.new_node("w".to_string()); + c3.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![w2], }, ); - h3.new_edge( + c3.new_edge( g_label.clone(), Hyperedge { sources: vec![w2], targets: vec![w3], }, ); 
- h3.new_edge( + c3.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![a0], }, ); - h3.new_edge( + c3.new_edge( g_label.clone(), Hyperedge { sources: vec![a1], targets: vec![w5], }, ); - h3.quotient = (vec![a0, a0], vec![a1, k1]); - h3.quotient(); - expected.push(h3); - - let mut h4: Hypergraph = Hypergraph::empty(); - let w1 = h4.new_node("w".to_string()); - let w2 = h4.new_node("w".to_string()); - let w3 = h4.new_node("w".to_string()); - let w5 = h4.new_node("w".to_string()); - let a0 = h4.new_node("w".to_string()); - let a1 = h4.new_node("w".to_string()); - let k0 = h4.new_node("w".to_string()); - let k1 = h4.new_node("w".to_string()); - h4.new_edge( + c3.quotient = (vec![a0, a0], vec![a1, k1]); + let q3 = c3.quotient(); + let k0_c3 = NodeId(q3.table[k0.0]); + let k1_c3 = NodeId(q3.table[k1.0]); + let mut p3 = rule.right.coproduct(&c3); + let left_nodes = rule.right.nodes.len(); + p3.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); + p3.quotient.1.push(NodeId(k0_c3.0 + left_nodes)); + p3.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); + p3.quotient.1.push(NodeId(k1_c3.0 + left_nodes)); + p3.quotient(); + expected.push(p3); + + let mut c4: Hypergraph = Hypergraph::empty(); + let w1 = c4.new_node("w".to_string()); + let w2 = c4.new_node("w".to_string()); + let w3 = c4.new_node("w".to_string()); + let w5 = c4.new_node("w".to_string()); + let a0 = c4.new_node("w".to_string()); + let a1 = c4.new_node("w".to_string()); + let k0 = c4.new_node("w".to_string()); + let k1 = c4.new_node("w".to_string()); + c4.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![w2], }, ); - h4.new_edge( + c4.new_edge( g_label.clone(), Hyperedge { sources: vec![w2], targets: vec![w3], }, ); - h4.new_edge( + c4.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![a0], }, ); - h4.new_edge( + c4.new_edge( g_label.clone(), Hyperedge { sources: vec![a1], targets: vec![w5], }, ); - h4.quotient = (vec![a0, a0], vec![a1, k0]); - h4.quotient(); - expected.push(h4); - - let mut h5: Hypergraph = Hypergraph::empty(); - let w1 = h5.new_node("w".to_string()); - let w2 = h5.new_node("w".to_string()); - let w3 = h5.new_node("w".to_string()); - let w5 = h5.new_node("w".to_string()); - let a0 = h5.new_node("w".to_string()); - let a1 = h5.new_node("w".to_string()); - let k0 = h5.new_node("w".to_string()); - let k1 = h5.new_node("w".to_string()); - h5.new_edge( + c4.quotient = (vec![a0, a0], vec![a1, k0]); + let q4 = c4.quotient(); + let k0_c4 = NodeId(q4.table[k0.0]); + let k1_c4 = NodeId(q4.table[k1.0]); + let mut p4 = rule.right.coproduct(&c4); + let left_nodes = rule.right.nodes.len(); + p4.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); + p4.quotient.1.push(NodeId(k0_c4.0 + left_nodes)); + p4.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); + p4.quotient.1.push(NodeId(k1_c4.0 + left_nodes)); + p4.quotient(); + expected.push(p4); + + let mut c5: Hypergraph = Hypergraph::empty(); + let w1 = c5.new_node("w".to_string()); + let w2 = c5.new_node("w".to_string()); + let w3 = c5.new_node("w".to_string()); + let w5 = c5.new_node("w".to_string()); + let a0 = c5.new_node("w".to_string()); + let a1 = c5.new_node("w".to_string()); + let k0 = c5.new_node("w".to_string()); + let k1 = c5.new_node("w".to_string()); + c5.new_edge( f_label.clone(), Hyperedge { sources: vec![w1], targets: vec![w2], }, ); - h5.new_edge( + c5.new_edge( g_label.clone(), Hyperedge { sources: vec![w2], targets: vec![w3], }, ); - h5.new_edge( + c5.new_edge( f_label.clone(), Hyperedge 
{ sources: vec![w1], targets: vec![a0], }, ); - h5.new_edge( + c5.new_edge( g_label.clone(), Hyperedge { sources: vec![a1], targets: vec![w5], }, ); - h5.quotient = (vec![a0, a0, a0], vec![a1, k0, k1]); - h5.quotient(); - expected.push(h5); - - let complements = rewrite(&g, &rule, &candidate).expect("expected complements"); + c5.quotient = (vec![a0, a0, a0], vec![a1, k0, k1]); + let q5 = c5.quotient(); + let k0_c5 = NodeId(q5.table[k0.0]); + let k1_c5 = NodeId(q5.table[k1.0]); + let mut p5 = rule.right.coproduct(&c5); + let left_nodes = rule.right.nodes.len(); + p5.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); + p5.quotient.1.push(NodeId(k0_c5.0 + left_nodes)); + p5.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); + p5.quotient.1.push(NodeId(k1_c5.0 + left_nodes)); + p5.quotient(); + expected.push(p5); + + let complements = rewrite(&g, &rule, &candidate); assert_eq!(complements.len(), 5); for expected_graph in expected { assert!(complements.iter().any(|h| h == &expected_graph)); diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index d202698..42e971e 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -40,7 +40,7 @@ fn test_rewrite_identification_fails() { edges: empty_map(g.edges.len()), }; - assert!(rewrite(&g, &rule, &candidate).is_none()); + assert!(rewrite(&g, &rule, &candidate).is_empty()); } #[test] @@ -67,7 +67,7 @@ fn test_rewrite_dangling_fails() { edges: empty_map(g.edges.len()), }; - assert!(rewrite(&g, &rule, &candidate).is_none()); + assert!(rewrite(&g, &rule, &candidate).is_empty()); } #[test] @@ -87,6 +87,6 @@ fn test_rewrite_gluing_ok() { edges: empty_map(g.edges.len()), }; - let complements = rewrite(&g, &rule, &candidate).expect("expected complements"); + let complements = rewrite(&g, &rule, &candidate); assert!(!complements.is_empty()); } From 196b5c14dece44584375a2a8fa738f7297e23d0f Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 06:43:54 +0100 Subject: [PATCH 12/41] remove LaxHypergraphArrow --- src/lax/hypergraph.rs | 225 ---------------------------------------- tests/lax/hypergraph.rs | 59 +---------- 2 files changed, 1 insertion(+), 283 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index b8adeb3..006fa21 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -1,5 +1,4 @@ use crate::array::vec::{VecArray, VecKind}; -use crate::category::Arrow; use crate::finite_function::*; use core::fmt::Debug; @@ -198,230 +197,6 @@ pub struct Hypergraph { } /// A lax morphism of hypergraphs that preserves structure up to the target quotient. 
-pub struct LaxHypergraphArrow { - /// Source hypergraph - pub source: Hypergraph, - - /// Target hypergraph - pub target: Hypergraph, - - /// Map on nodes - pub w: FiniteFunction, - - /// Map on edges - pub x: FiniteFunction, -} - -fn class_labels( - nodes: &[O], - q: &FiniteFunction, -) -> Result, (NodeId, NodeId)> { - let mut classes: Vec> = vec![None; q.target()]; - for (i, label) in nodes.iter().enumerate() { - let class = q.table[i]; - match &classes[class] { - None => classes[class] = Some((label.clone(), NodeId(i))), - Some((existing, existing_id)) => { - if existing != label { - return Err((NodeId(i), *existing_id)); - } - } - } - } - Ok(classes - .into_iter() - .map(|entry| entry.expect("each class has at least one node").0) - .collect()) -} - -impl LaxHypergraphArrow { - pub fn new( - source: Hypergraph, - target: Hypergraph, - w: FiniteFunction, - x: FiniteFunction, - ) -> Self { - LaxHypergraphArrow { - source, - target, - w, - x, - } - } - - /// Convert this lax morphism to a strict one by quotienting source/target. - /// - /// This panics if the morphism is invalid - pub fn to_strict(self) -> crate::strict::hypergraph::HypergraphArrow { - if self.w.source() != self.source.nodes.len() { - panic!( - "node map source size mismatch: got {}, expected {}", - self.w.source(), - self.source.nodes.len() - ); - } - if self.w.target() != self.target.nodes.len() { - panic!( - "node map target size mismatch: got {}, expected {}", - self.w.target(), - self.target.nodes.len() - ); - } - if self.x.source() != self.source.edges.len() { - panic!( - "edge map source size mismatch: got {}, expected {}", - self.x.source(), - self.source.edges.len() - ); - } - if self.x.target() != self.target.edges.len() { - panic!( - "edge map target size mismatch: got {}, expected {}", - self.x.target(), - self.target.edges.len() - ); - } - - let q_src = self.source.coequalizer(); - let q_tgt = self.target.coequalizer(); - - let _source_labels = class_labels(&self.source.nodes, &q_src).unwrap_or_else(|(a, b)| { - panic!( - "source quotient has conflicting node labels between {:?} and {:?}", - a, b - ) - }); - let target_labels = class_labels(&self.target.nodes, &q_tgt).unwrap_or_else(|(a, b)| { - panic!( - "target quotient has conflicting node labels between {:?} and {:?}", - a, b - ) - }); - - for (i, src_label) in self.source.nodes.iter().enumerate() { - let tgt_node = self.w.table[i]; - let class = q_tgt.table[tgt_node]; - if *src_label != target_labels[class] { - panic!( - "node label mismatch: source node {:?} does not match target class {}", - NodeId(i), - class - ); - } - } - - let mut class_image: Vec> = vec![None; q_src.target()]; - for i in 0..self.source.nodes.len() { - let class = q_src.table[i]; - let image_class = q_tgt.table[self.w.table[i]]; - match class_image[class] { - None => class_image[class] = Some((image_class, NodeId(i))), - Some((existing_class, existing_id)) => { - if existing_class != image_class { - panic!( - "node map not constant on source quotient: {:?}->{}, {:?}->{}", - NodeId(i), - image_class, - existing_id, - existing_class - ); - } - } - } - } - - for (i, src_label) in self.source.edges.iter().enumerate() { - let tgt_edge = self.x.table[i]; - if *src_label != self.target.edges[tgt_edge] { - panic!( - "edge label mismatch: source edge {:?} does not match target edge {:?}", - EdgeId(i), - EdgeId(tgt_edge) - ); - } - - let src_edge = &self.source.adjacency[i]; - let tgt_edge_adj = &self.target.adjacency[tgt_edge]; - - if src_edge.sources.len() != tgt_edge_adj.sources.len() { - 
panic!( - "source arity mismatch: source edge {:?} vs target edge {:?}", - EdgeId(i), - EdgeId(tgt_edge) - ); - } - if src_edge.targets.len() != tgt_edge_adj.targets.len() { - panic!( - "target arity mismatch: source edge {:?} vs target edge {:?}", - EdgeId(i), - EdgeId(tgt_edge) - ); - } - - let src_sources_classes: Vec = src_edge - .sources - .iter() - .map(|n| q_tgt.table[self.w.table[n.0]]) - .collect(); - let tgt_sources_classes: Vec = tgt_edge_adj - .sources - .iter() - .map(|n| q_tgt.table[n.0]) - .collect(); - if src_sources_classes != tgt_sources_classes { - panic!( - "source incidence mismatch: source edge {:?} vs target edge {:?}", - EdgeId(i), - EdgeId(tgt_edge) - ); - } - - let src_targets_classes: Vec = src_edge - .targets - .iter() - .map(|n| q_tgt.table[self.w.table[n.0]]) - .collect(); - let tgt_targets_classes: Vec = tgt_edge_adj - .targets - .iter() - .map(|n| q_tgt.table[n.0]) - .collect(); - if src_targets_classes != tgt_targets_classes { - panic!( - "target incidence mismatch: source edge {:?} vs target edge {:?}", - EdgeId(i), - EdgeId(tgt_edge) - ); - } - } - - let mut source_h = self.source.clone(); - source_h.quotient(); - let mut target_h = self.target.clone(); - target_h.quotient(); - - let source_strict = make_hypergraph(&source_h) - .validate() - .unwrap_or_else(|err| panic!("invalid source hypergraph after quotient: {:?}", err)); - let target_strict = make_hypergraph(&target_h) - .validate() - .unwrap_or_else(|err| panic!("invalid target hypergraph after quotient: {:?}", err)); - - let w_composed = (&self.w >> &q_tgt).expect("node map already checked"); - let w_table = - coequalizer_universal(&q_src, &w_composed.table).expect("compatibility checked"); - let w_strict = FiniteFunction::new(w_table, q_tgt.target()).expect("by construction"); - - crate::strict::hypergraph::HypergraphArrow::new( - source_strict, - target_strict, - w_strict, - self.x, - ) - .unwrap_or_else(|err| panic!("strict morphism naturality failed: {:?}", err)) - } -} - impl Hypergraph { /// The empty Hypergraph with no nodes or edges. 
pub fn empty() -> Self { diff --git a/tests/lax/hypergraph.rs b/tests/lax/hypergraph.rs index 242e73a..b3a5cba 100644 --- a/tests/lax/hypergraph.rs +++ b/tests/lax/hypergraph.rs @@ -1,6 +1,4 @@ -use open_hypergraphs::array::vec::{VecArray, VecKind}; -use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{Arrow, Hyperedge, Hypergraph, LaxHypergraphArrow, NodeId}; +use open_hypergraphs::lax::{Hyperedge, Hypergraph, NodeId}; #[test] fn test_delete_nodes_remap_and_quotient() { @@ -84,58 +82,3 @@ fn test_delete_nodes_ignores_out_of_range() { assert_eq!(h.quotient.0, vec![NodeId(0)]); assert_eq!(h.quotient.1, vec![NodeId(1)]); } - -#[test] -fn test_lax_hypergraph_arrow_to_strict_identity() { - let mut source = Hypergraph::empty(); - source.nodes = vec![1, 2]; - source.edges = vec![10]; - source.adjacency = vec![Hyperedge { - sources: vec![NodeId(0)], - targets: vec![NodeId(1)], - }]; - - let mut target = Hypergraph::empty(); - target.nodes = vec![1, 2]; - target.edges = vec![10]; - target.adjacency = vec![Hyperedge { - sources: vec![NodeId(0)], - targets: vec![NodeId(1)], - }]; - - let w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); - let x = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); - - let arrow = LaxHypergraphArrow::new(source, target, w, x).to_strict(); - - assert_eq!(arrow.w.table, VecArray(vec![0, 1])); - assert_eq!(arrow.w.target(), 2); - assert_eq!(arrow.x.table, VecArray(vec![0])); - assert_eq!(arrow.x.target(), 1); -} - -#[test] -#[should_panic(expected = "node map not constant on source quotient")] -fn test_lax_hypergraph_arrow_to_strict_panics_on_nonconstant_w() { - let mut source = Hypergraph::empty(); - source.nodes = vec![1, 1]; - source.edges = vec![10]; - source.adjacency = vec![Hyperedge { - sources: vec![NodeId(0)], - targets: vec![NodeId(1)], - }]; - source.quotient = (vec![NodeId(0)], vec![NodeId(1)]); - - let mut target = Hypergraph::empty(); - target.nodes = vec![1, 1]; - target.edges = vec![10]; - target.adjacency = vec![Hyperedge { - sources: vec![NodeId(0)], - targets: vec![NodeId(1)], - }]; - - let w = FiniteFunction::::new(VecArray(vec![0, 1]), 2).unwrap(); - let x = FiniteFunction::::new(VecArray(vec![0]), 1).unwrap(); - - let _ = LaxHypergraphArrow::new(source, target, w, x).to_strict(); -} From 3963863f7b58d71f8049c2b92bac74e6e85f8b73 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:00:04 +0100 Subject: [PATCH 13/41] use borrowing for span since it is read only --- src/lax/hypergraph.rs | 30 ++++++++++++------------ src/lax/rewrite.rs | 54 ++++++++++++++++++++++++------------------- tests/lax/rewrite.rs | 19 ++++++++------- 3 files changed, 56 insertions(+), 47 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 006fa21..26dfe78 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -25,25 +25,25 @@ pub struct NodeEdgeMap { pub edges: FiniteFunction, } -#[derive(Clone)] -pub struct LaxSpan { - pub apex: Hypergraph, - pub left: Hypergraph, - pub right: Hypergraph, - pub left_map: NodeEdgeMap, - pub right_map: NodeEdgeMap, +#[derive(Clone, Copy)] +pub struct Span<'a, O, A> { + pub apex: &'a Hypergraph, + pub left: &'a Hypergraph, + pub right: &'a Hypergraph, + pub left_map: &'a NodeEdgeMap, + pub right_map: &'a NodeEdgeMap, } -impl LaxSpan { +impl<'a, O, A> Span<'a, O, A> { /// Construct a lax span and validate its structural properties. 
pub fn new( - apex: Hypergraph, - left: Hypergraph, - right: Hypergraph, - left_map: NodeEdgeMap, - right_map: NodeEdgeMap, + apex: &'a Hypergraph, + left: &'a Hypergraph, + right: &'a Hypergraph, + left_map: &'a NodeEdgeMap, + right_map: &'a NodeEdgeMap, ) -> Self { - LaxSpan { + Span { apex, left, right, @@ -134,7 +134,7 @@ impl LaxSpan { "pushout assumes discrete apex (no edge identifications)" ); - let mut pushout = self.left.coproduct(&self.right); + let mut pushout = self.left.coproduct(self.right); let left_nodes = self.left.nodes.len(); for (k_idx, &l_idx) in self.left_map.nodes.table.iter().enumerate() { let r_idx = self.right_map.nodes.table[k_idx] + left_nodes; diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 03fff6f..922bb08 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,6 +1,6 @@ use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; -use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; +use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; struct ExplodedContext { graph: Hypergraph, @@ -11,7 +11,7 @@ struct ExplodedContext { /// Rewrite a lax hypergraph using a rule span and candidate map. pub fn rewrite( g: &Hypergraph, - rule: &LaxSpan, + rule: &Span<'_, O, A>, candidate: &NodeEdgeMap, ) -> Vec> { validate_candidate_map(rule, g, candidate); @@ -39,7 +39,7 @@ pub fn rewrite( fn pushout_result( exploded: &ExplodedContext, - rule: &LaxSpan, + rule: &Span<'_, O, A>, partitions_per_fiber: &[Vec], selection: &[usize], ) -> Hypergraph { @@ -78,20 +78,14 @@ pub fn rewrite( edges: FiniteFunction::::initial(complement.edges.len()), }; - let span = LaxSpan::new( - rule.apex.clone(), - rule.right.clone(), - complement, - rule.right_map.clone(), - k_to_c, - ); + let span = Span::new(rule.apex, rule.right, &complement, rule.right_map, &k_to_c); span.pushout() } fn walk( idx: usize, exploded: &ExplodedContext, - rule: &LaxSpan, + rule: &Span<'_, O, A>, partitions_per_fiber: &[Vec], selection: &mut Vec, results: &mut Vec>, @@ -134,7 +128,7 @@ pub fn rewrite( fn exploded_context( g: &Hypergraph, - rule: &LaxSpan, + rule: &Span<'_, O, A>, candidate: &NodeEdgeMap, ) -> ExplodedContext { let mut in_image_nodes = vec![false; g.nodes.len()]; @@ -449,7 +443,7 @@ impl UnionFind { } fn validate_candidate_map( - rule: &LaxSpan, + rule: &Span<'_, O, A>, g: &Hypergraph, candidate: &NodeEdgeMap, ) { @@ -483,7 +477,7 @@ fn validate_candidate_map( } } -fn identification_condition(rule: &LaxSpan, candidate: &NodeEdgeMap) -> bool { +fn identification_condition(rule: &Span<'_, O, A>, candidate: &NodeEdgeMap) -> bool { let mut in_image = vec![false; rule.left.nodes.len()]; for i in 0..rule.left_map.nodes.source() { let idx = rule.left_map.nodes.table[i]; @@ -509,7 +503,7 @@ fn identification_condition(rule: &LaxSpan, candidate: &NodeEdgeMap) } fn dangling_condition( - rule: &LaxSpan, + rule: &Span<'_, O, A>, candidate: &NodeEdgeMap, g: &Hypergraph, ) -> bool { @@ -563,7 +557,7 @@ mod tests { }; use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; - use crate::lax::{Arrow, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap, NodeId}; + use crate::lax::{Arrow, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; use std::collections::HashMap; fn empty_map(target: usize) -> FiniteFunction { @@ -572,7 +566,9 @@ mod tests { #[test] fn test_exploded_context_construction() { - let (f_label, g_label, g, rule, candidate) = example_rewrite_input(); + let (f_label, 
g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); let exploded = exploded_context(&g, &rule, &candidate); let mut expected: Hypergraph = Hypergraph::empty(); @@ -620,7 +616,9 @@ mod tests { #[test] fn test_f_prime_refines_q() { - let (_f_label, _g_label, g, rule, candidate) = example_rewrite_input(); + let (_f_label, _g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); let exploded = exploded_context(&g, &rule, &candidate); let mut f_prime_to_q = vec![None; exploded.to_copied_plus_left.nodes.target()]; @@ -635,7 +633,9 @@ mod tests { #[test] fn test_fiber_partitions_expected_blocks() { - let (_f_label, _g_label, g, rule, candidate) = example_rewrite_input(); + let (_f_label, _g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); let exploded = exploded_context(&g, &rule, &candidate); let fibers = fiber_partition_inputs(&exploded); @@ -708,7 +708,9 @@ mod tests { #[test] fn test_rewrite_complements_working_example() { - let (f_label, g_label, g, rule, candidate) = example_rewrite_input(); + let (f_label, g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); let mut expected = Vec::new(); let mut c1: Hypergraph = Hypergraph::empty(); @@ -993,7 +995,11 @@ mod tests { String, String, Hypergraph, - LaxSpan, + Hypergraph, + Hypergraph, + Hypergraph, + NodeEdgeMap, + NodeEdgeMap, NodeEdgeMap, ) { // From Session 4.5 Pushout Complements and Rewriting Modulo Frobenius @@ -1070,13 +1076,13 @@ mod tests { nodes: FiniteFunction::::new(VecArray(vec![0, 2]), right.nodes.len()).unwrap(), edges: empty_map(right.edges.len()), }; - let rule = LaxSpan::new(apex, left, right, left_map, right_map); - let candidate = NodeEdgeMap { nodes: FiniteFunction::::new(VecArray(vec![3]), g.nodes.len()).unwrap(), edges: empty_map(g.edges.len()), }; - (f_label, g_label, g, rule, candidate) + ( + f_label, g_label, g, apex, left, right, left_map, right_map, candidate, + ) } } diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index 42e971e..a7c9727 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -1,15 +1,15 @@ use open_hypergraphs::array::vec::{VecArray, VecKind}; use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{rewrite, Hyperedge, Hypergraph, LaxSpan, NodeEdgeMap}; +use open_hypergraphs::lax::{rewrite, Hyperedge, Hypergraph, NodeEdgeMap, Span}; fn empty_map(target: usize) -> FiniteFunction { FiniteFunction::::new(VecArray(vec![]), target).unwrap() } fn span_with_empty_apex( - left: Hypergraph, - right: Hypergraph, -) -> LaxSpan { + left: &Hypergraph, + right: &Hypergraph, +) -> (Hypergraph, NodeEdgeMap, NodeEdgeMap) { let apex: Hypergraph = Hypergraph::empty(); let left_map = NodeEdgeMap { nodes: empty_map(left.nodes.len()), @@ -19,7 +19,7 @@ fn span_with_empty_apex( nodes: empty_map(right.nodes.len()), edges: empty_map(right.edges.len()), }; - LaxSpan::new(apex, left, right, left_map, right_map) + (apex, left_map, right_map) } #[test] @@ -30,7 +30,8 @@ fn test_rewrite_identification_fails() { l.new_node(2); let r: Hypergraph = Hypergraph::empty(); - let rule = span_with_empty_apex(l.clone(), r); + let (apex, left_map, right_map) = 
span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); let mut g: Hypergraph = Hypergraph::empty(); g.new_node(1); @@ -50,7 +51,8 @@ fn test_rewrite_dangling_fails() { l.new_node(1); let r: Hypergraph = Hypergraph::empty(); - let rule = span_with_empty_apex(l.clone(), r); + let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); let mut g: Hypergraph = Hypergraph::empty(); let v = g.new_node(1); @@ -77,7 +79,8 @@ fn test_rewrite_gluing_ok() { l.new_node(1); let r: Hypergraph = Hypergraph::empty(); - let rule = span_with_empty_apex(l.clone(), r); + let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); let mut g: Hypergraph = Hypergraph::empty(); g.new_node(1); From 39defa0b8b4a99aab227a966d84b9c72a615d45f Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:01:27 +0100 Subject: [PATCH 14/41] edit --- src/lax/hypergraph.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 26dfe78..e9e6721 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -196,7 +196,6 @@ pub struct Hypergraph { pub quotient: (Vec, Vec), } -/// A lax morphism of hypergraphs that preserves structure up to the target quotient. impl Hypergraph { /// The empty Hypergraph with no nodes or edges. pub fn empty() -> Self { From d093c206ed6ad9cd31d7cf37cc8c405249566f32 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:06:03 +0100 Subject: [PATCH 15/41] fix --- src/lax/hypergraph.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index e9e6721..28710d3 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -1,5 +1,6 @@ use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::*; +use crate::lax::Arrow; use core::fmt::Debug; From d3d899f240cbd4e79e558c26ab99293b6ff0f9de Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:09:15 +0100 Subject: [PATCH 16/41] move UnionFind --- src/lax/rewrite.rs | 88 ++------------------------------------------- src/lib.rs | 1 + src/union_find.rs | 90 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 86 deletions(-) create mode 100644 src/union_find.rs diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 922bb08..a3c7d59 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,6 +1,7 @@ use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; +use crate::union_find::UnionFind; struct ExplodedContext { graph: Hypergraph, @@ -274,7 +275,7 @@ fn enumerate_fiber_partitions(fiber: &FiberPartitionInput) -> Vec bool { - uf.components == 1 + uf.components() == 1 } fn walk( @@ -357,91 +358,6 @@ struct BlockState { classes: Vec, } -struct UnionFind { - parent: Vec, - size: Vec, - history: Vec, - components: usize, -} - -enum HistoryEntry { - Noop, - Merge { - root: usize, - parent: usize, - size_parent: usize, - }, -} - -impl UnionFind { - fn new(n: usize) -> Self { - Self { - parent: (0..n).collect(), - size: vec![1; n], - history: Vec::new(), - components: n, - } - } - - fn len(&self) -> usize { - self.parent.len() - } - - fn find(&mut self, x: usize) -> usize { - let mut node = x; - while self.parent[node] != node { - node = self.parent[node]; - } - node - } - - fn union(&mut self, x: usize, y: usize) { - let 
root_x = self.find(x); - let root_y = self.find(y); - if root_x == root_y { - self.history.push(HistoryEntry::Noop); - return; - } - - let (root, parent) = if self.size[root_x] >= self.size[root_y] { - (root_y, root_x) - } else { - (root_x, root_y) - }; - - self.history.push(HistoryEntry::Merge { - root, - parent, - size_parent: self.size[parent], - }); - - self.parent[root] = parent; - self.size[parent] += self.size[root]; - self.components -= 1; - } - - fn snapshot(&self) -> usize { - self.history.len() - } - - fn rollback(&mut self, snapshot: usize) { - while self.history.len() > snapshot { - match self.history.pop().expect("rollback history") { - HistoryEntry::Noop => {} - HistoryEntry::Merge { - root, - parent, - size_parent, - } => { - self.parent[root] = root; - self.size[parent] = size_parent; - self.components += 1; - } - } - } - } -} - fn validate_candidate_map( rule: &Span<'_, O, A>, g: &Hypergraph, diff --git a/src/lib.rs b/src/lib.rs index 876911f..06a74b9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -260,6 +260,7 @@ pub mod finite_function; pub mod indexed_coproduct; pub mod operations; pub mod semifinite; +pub mod union_find; // Strict open hypergraphs pub mod strict; diff --git a/src/union_find.rs b/src/union_find.rs new file mode 100644 index 0000000..ce3a0df --- /dev/null +++ b/src/union_find.rs @@ -0,0 +1,90 @@ +#[derive(Debug, Clone)] +pub struct UnionFind { + parent: Vec, + size: Vec, + history: Vec, + components: usize, +} + +#[derive(Debug, Clone)] +enum HistoryEntry { + Noop, + Merge { + root: usize, + parent: usize, + size_parent: usize, + }, +} + +impl UnionFind { + pub fn new(n: usize) -> Self { + Self { + parent: (0..n).collect(), + size: vec![1; n], + history: Vec::new(), + components: n, + } + } + + pub fn len(&self) -> usize { + self.parent.len() + } + + pub fn components(&self) -> usize { + self.components + } + + pub fn find(&mut self, x: usize) -> usize { + let mut node = x; + while self.parent[node] != node { + node = self.parent[node]; + } + node + } + + pub fn union(&mut self, x: usize, y: usize) { + let root_x = self.find(x); + let root_y = self.find(y); + if root_x == root_y { + self.history.push(HistoryEntry::Noop); + return; + } + + let (root, parent) = if self.size[root_x] >= self.size[root_y] { + (root_y, root_x) + } else { + (root_x, root_y) + }; + + self.history.push(HistoryEntry::Merge { + root, + parent, + size_parent: self.size[parent], + }); + + self.parent[root] = parent; + self.size[parent] += self.size[root]; + self.components -= 1; + } + + pub fn snapshot(&self) -> usize { + self.history.len() + } + + pub fn rollback(&mut self, snapshot: usize) { + while self.history.len() > snapshot { + match self.history.pop().expect("rollback history") { + HistoryEntry::Noop => {} + HistoryEntry::Merge { + root, + parent, + size_parent, + } => { + self.parent[root] = root; + self.size[parent] = size_parent; + self.components += 1; + } + } + } + } +} From b346cbcca3053654cf9cb555135eac0843adf865 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:34:40 +0100 Subject: [PATCH 17/41] refactor using quotiented_with --- src/lax/hypergraph.rs | 11 +++++++++++ src/lax/rewrite.rs | 22 +++++++++------------- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 28710d3..c107bad 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -410,6 +410,17 @@ impl Hypergraph { } impl Hypergraph { + /// Return a quotiented copy along with the coequalizer used. 
+ pub fn quotiented_with( + mut self, + quotient_left: Vec, + quotient_right: Vec, + ) -> (Self, FiniteFunction) { + self.quotient = (quotient_left, quotient_right); + let q = self.quotient(); + (self, q) + } + /// Construct a [`Hypergraph`] by identifying nodes in the quotient map. /// Mutably quotient this [`Hypergraph`], returning the coequalizer calculated from `self.quotient`. /// diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index a3c7d59..f52091c 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -60,9 +60,10 @@ pub fn rewrite( } } - let mut complement = exploded.graph.clone(); - complement.quotient = (quotient_left, quotient_right); - let q = complement.quotient(); + let (complement, q) = exploded + .graph + .clone() + .quotiented_with(quotient_left, quotient_right); let copied_nodes = exploded.graph.nodes.len() - rule.apex.nodes.len(); let k_to_c_nodes = FiniteFunction::::new( @@ -666,8 +667,7 @@ mod tests { targets: vec![w5], }, ); - c1.quotient = (vec![a1, a0], vec![k0, k1]); - let q1 = c1.quotient(); + let (c1, q1) = c1.quotiented_with(vec![a1, a0], vec![k0, k1]); let k0_c1 = NodeId(q1.table[k0.0]); let k1_c1 = NodeId(q1.table[k1.0]); let mut p1 = rule.right.coproduct(&c1); @@ -716,8 +716,7 @@ mod tests { targets: vec![w5], }, ); - c2.quotient = (vec![a1, a0], vec![k1, k0]); - let q2 = c2.quotient(); + let (c2, q2) = c2.quotiented_with(vec![a1, a0], vec![k1, k0]); let k0_c2 = NodeId(q2.table[k0.0]); let k1_c2 = NodeId(q2.table[k1.0]); let mut p2 = rule.right.coproduct(&c2); @@ -766,8 +765,7 @@ mod tests { targets: vec![w5], }, ); - c3.quotient = (vec![a0, a0], vec![a1, k1]); - let q3 = c3.quotient(); + let (c3, q3) = c3.quotiented_with(vec![a0, a0], vec![a1, k1]); let k0_c3 = NodeId(q3.table[k0.0]); let k1_c3 = NodeId(q3.table[k1.0]); let mut p3 = rule.right.coproduct(&c3); @@ -816,8 +814,7 @@ mod tests { targets: vec![w5], }, ); - c4.quotient = (vec![a0, a0], vec![a1, k0]); - let q4 = c4.quotient(); + let (c4, q4) = c4.quotiented_with(vec![a0, a0], vec![a1, k0]); let k0_c4 = NodeId(q4.table[k0.0]); let k1_c4 = NodeId(q4.table[k1.0]); let mut p4 = rule.right.coproduct(&c4); @@ -866,8 +863,7 @@ mod tests { targets: vec![w5], }, ); - c5.quotient = (vec![a0, a0, a0], vec![a1, k0, k1]); - let q5 = c5.quotient(); + let (c5, q5) = c5.quotiented_with(vec![a0, a0, a0], vec![a1, k0, k1]); let k0_c5 = NodeId(q5.table[k0.0]); let k1_c5 = NodeId(q5.table[k1.0]); let mut p5 = rule.right.coproduct(&c5); From 87ac97024798b02e196840986f70180d51002820 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 07:57:39 +0100 Subject: [PATCH 18/41] factor out and renaming interface to complement --- src/lax/rewrite.rs | 37 ++++++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index f52091c..78cee5e 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -7,6 +7,13 @@ struct ExplodedContext { graph: Hypergraph, to_g: NodeEdgeMap, to_copied_plus_left: NodeEdgeMap, + interface_in_exploded_nodes: FiniteFunction, +} + +impl ExplodedContext { + fn interface_in_exploded_nodes(&self) -> &FiniteFunction { + &self.interface_in_exploded_nodes + } } /// Rewrite a lax hypergraph using a rule span and candidate map. 
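A sketch of the call pattern for `rewrite`, matching how the tests in this file use it; every binding below is illustrative and assumed to be constructed as in those tests:

    // The rule is a span L <- K -> R; `candidate` maps the left leg L into `host`.
    let rule = Span::new(&apex, &left, &right, &left_map, &right_map);
    // One rewritten hypergraph is returned per pushout complement of the match.
    let results = rewrite(&host, &rule, &candidate);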
@@ -65,22 +72,22 @@ pub fn rewrite( .clone() .quotiented_with(quotient_left, quotient_right); - let copied_nodes = exploded.graph.nodes.len() - rule.apex.nodes.len(); - let k_to_c_nodes = FiniteFunction::::new( - VecArray( - (0..rule.apex.nodes.len()) - .map(|idx| q.table[copied_nodes + idx]) - .collect(), - ), - complement.nodes.len(), - ) - .expect("k to complement map"); - let k_to_c = NodeEdgeMap { - nodes: k_to_c_nodes, + let interface_to_complement_nodes = exploded + .interface_in_exploded_nodes() + .compose(&q) + .expect("interface to complement map"); + let interface_to_complement = NodeEdgeMap { + nodes: interface_to_complement_nodes, edges: FiniteFunction::::initial(complement.edges.len()), }; - let span = Span::new(rule.apex, rule.right, &complement, rule.right_map, &k_to_c); + let span = Span::new( + rule.apex, + rule.right, + &complement, + rule.right_map, + &interface_to_complement, + ); span.pushout() } @@ -226,10 +233,14 @@ fn exploded_context( .expect("coproduct id + left edges"), }; + let interface_in_exploded_nodes = + FiniteFunction::::identity(rule.apex.nodes.len()).inject1(copied_nodes); + ExplodedContext { graph: h.coproduct(&rule.apex), to_g, to_copied_plus_left, + interface_in_exploded_nodes, } } From 765059d7dc5e0a0fe5384e7b27032ed73ce33d78 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:03:40 +0100 Subject: [PATCH 19/41] refactor --- src/lax/rewrite.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 78cee5e..f4cb91a 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -72,13 +72,15 @@ pub fn rewrite( .clone() .quotiented_with(quotient_left, quotient_right); - let interface_to_complement_nodes = exploded - .interface_in_exploded_nodes() - .compose(&q) - .expect("interface to complement map"); - let interface_to_complement = NodeEdgeMap { - nodes: interface_to_complement_nodes, - edges: FiniteFunction::::initial(complement.edges.len()), + let interface_to_complement = { + let nodes = exploded + .interface_in_exploded_nodes() + .compose(&q) + .expect("interface to complement map"); + NodeEdgeMap { + nodes, + edges: FiniteFunction::::initial(complement.edges.len()), + } }; let span = Span::new( From 3567efd9ae3879111260b0997c69b239619d05a0 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:19:09 +0100 Subject: [PATCH 20/41] renaming --- src/lax/rewrite.rs | 77 +++++++++++++++++++++++----------------------- 1 file changed, 39 insertions(+), 38 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index f4cb91a..ce7c183 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -5,7 +5,7 @@ use crate::union_find::UnionFind; struct ExplodedContext { graph: Hypergraph, - to_g: NodeEdgeMap, + to_host: NodeEdgeMap, to_copied_plus_left: NodeEdgeMap, interface_in_exploded_nodes: FiniteFunction, } @@ -34,12 +34,7 @@ pub fn rewrite( .collect(); if partitions_per_fiber.is_empty() { - return vec![pushout_result( - &exploded, - rule, - &partitions_per_fiber, - &[], - )]; + return vec![pushout_result(&exploded, rule, &partitions_per_fiber, &[])]; } let mut complements = Vec::new(); @@ -138,36 +133,36 @@ pub fn rewrite( } fn exploded_context( - g: &Hypergraph, + host: &Hypergraph, rule: &Span<'_, O, A>, candidate: &NodeEdgeMap, ) -> ExplodedContext { - let mut in_image_nodes = vec![false; g.nodes.len()]; + let mut in_image_nodes = vec![false; host.nodes.len()]; for i in 0..candidate.nodes.source() { let idx = candidate.nodes.table[i]; in_image_nodes[idx] = 
true; } - let mut in_image_edges = vec![false; g.edges.len()]; + let mut in_image_edges = vec![false; host.edges.len()]; for i in 0..candidate.edges.source() { let idx = candidate.edges.table[i]; in_image_edges[idx] = true; } - let mut h = Hypergraph::empty(); - let mut h_node_to_g = Vec::new(); - let mut node_map: Vec> = vec![None; g.nodes.len()]; - for (idx, label) in g.nodes.iter().enumerate() { + let mut remainder = Hypergraph::empty(); + let mut remainder_node_to_host = Vec::new(); + let mut node_map: Vec> = vec![None; host.nodes.len()]; + for (idx, label) in host.nodes.iter().enumerate() { if in_image_nodes[idx] { continue; } - let new_id = h.new_node(label.clone()); - h_node_to_g.push(idx); + let new_id = remainder.new_node(label.clone()); + remainder_node_to_host.push(idx); node_map[idx] = Some(new_id.0); } - let mut h_edge_to_g = Vec::new(); - for (edge_id, edge) in g.adjacency.iter().enumerate() { + let mut remainder_edge_to_host = Vec::new(); + for (edge_id, edge) in host.adjacency.iter().enumerate() { if in_image_edges[edge_id] { continue; } @@ -177,8 +172,8 @@ fn exploded_context( let new_id = match node_map[node.0] { Some(existing) => NodeId(existing), None => { - let new_id = h.new_node(g.nodes[node.0].clone()); - h_node_to_g.push(node.0); + let new_id = remainder.new_node(host.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); new_id } }; @@ -190,38 +185,44 @@ fn exploded_context( let new_id = match node_map[node.0] { Some(existing) => NodeId(existing), None => { - let new_id = h.new_node(g.nodes[node.0].clone()); - h_node_to_g.push(node.0); + let new_id = remainder.new_node(host.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); new_id } }; targets.push(new_id); } - h.new_edge(g.edges[edge_id].clone(), Hyperedge { sources, targets }); - h_edge_to_g.push(edge_id); + remainder.new_edge(host.edges[edge_id].clone(), Hyperedge { sources, targets }); + remainder_edge_to_host.push(edge_id); } - let q_h_nodes = FiniteFunction::::new(VecArray(h_node_to_g), g.nodes.len()).unwrap(); - let q_h_edges = FiniteFunction::::new(VecArray(h_edge_to_g), g.edges.len()).unwrap(); - let q_k_nodes = rule + let q_remainder_nodes = + FiniteFunction::::new(VecArray(remainder_node_to_host), host.nodes.len()).unwrap(); + let q_remainder_edges = + FiniteFunction::::new(VecArray(remainder_edge_to_host), host.edges.len()).unwrap(); + let q_interface_nodes = rule .left_map .nodes .compose(&candidate.nodes) .expect("candidate map left nodes compose"); - let q_k_edges = rule + let q_interface_edges = rule .left_map .edges .compose(&candidate.edges) .expect("candidate map left edges compose"); - let to_g = NodeEdgeMap { - nodes: q_h_nodes.coproduct(&q_k_nodes).expect("node coproduct"), - edges: q_h_edges.coproduct(&q_k_edges).expect("edge coproduct"), + let to_host = NodeEdgeMap { + nodes: q_remainder_nodes + .coproduct(&q_interface_nodes) + .expect("node coproduct"), + edges: q_remainder_edges + .coproduct(&q_interface_edges) + .expect("edge coproduct"), }; - let copied_nodes = h.nodes.len(); - let copied_edges = h.edges.len(); + let copied_nodes = remainder.nodes.len(); + let copied_edges = remainder.edges.len(); let left_nodes = rule.left.nodes.len(); let left_edges = rule.left.edges.len(); let to_copied_plus_left = NodeEdgeMap { @@ -239,16 +240,16 @@ fn exploded_context( FiniteFunction::::identity(rule.apex.nodes.len()).inject1(copied_nodes); ExplodedContext { - graph: h.coproduct(&rule.apex), - to_g, + graph: remainder.coproduct(&rule.apex), + to_host, to_copied_plus_left, 
interface_in_exploded_nodes, } } fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec { - let mut fibers = vec![Vec::new(); exploded.to_g.nodes.target()]; - for (src, &tgt) in exploded.to_g.nodes.table.iter().enumerate() { + let mut fibers = vec![Vec::new(); exploded.to_host.nodes.target()]; + for (src, &tgt) in exploded.to_host.nodes.table.iter().enumerate() { fibers[tgt].push(NodeId(src)); } @@ -553,7 +554,7 @@ mod tests { let mut f_prime_to_q = vec![None; exploded.to_copied_plus_left.nodes.target()]; for (src, &f_prime_image) in exploded.to_copied_plus_left.nodes.table.iter().enumerate() { - let q_image = exploded.to_g.nodes.table[src]; + let q_image = exploded.to_host.nodes.table[src]; match f_prime_to_q[f_prime_image] { Some(existing) => assert_eq!(existing, q_image), None => f_prime_to_q[f_prime_image] = Some(q_image), From c264b985d941f48dfccb39fe97b2c90d8e05a167 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:36:41 +0100 Subject: [PATCH 21/41] renaming --- src/lax/rewrite.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index ce7c183..db5ceee 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -6,7 +6,7 @@ use crate::union_find::UnionFind; struct ExplodedContext { graph: Hypergraph, to_host: NodeEdgeMap, - to_copied_plus_left: NodeEdgeMap, + to_remainder_plus_redex: NodeEdgeMap, interface_in_exploded_nodes: FiniteFunction, } @@ -225,7 +225,7 @@ fn exploded_context( let copied_edges = remainder.edges.len(); let left_nodes = rule.left.nodes.len(); let left_edges = rule.left.edges.len(); - let to_copied_plus_left = NodeEdgeMap { + let to_remainder_plus_redex = NodeEdgeMap { nodes: FiniteFunction::::identity(copied_nodes) .inject0(left_nodes) .coproduct(&rule.left_map.nodes.inject1(copied_nodes)) @@ -242,7 +242,7 @@ fn exploded_context( ExplodedContext { graph: remainder.coproduct(&rule.apex), to_host, - to_copied_plus_left, + to_remainder_plus_redex, interface_in_exploded_nodes, } } @@ -258,11 +258,11 @@ fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec existing, None => { From 65f8c8fcd555f1f12937ab2ad374195da6ca6455 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:37:24 +0100 Subject: [PATCH 22/41] edit --- src/lax/rewrite.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index db5ceee..a4cfc41 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -229,11 +229,11 @@ fn exploded_context( nodes: FiniteFunction::::identity(copied_nodes) .inject0(left_nodes) .coproduct(&rule.left_map.nodes.inject1(copied_nodes)) - .expect("coproduct id + left nodes"), + .expect("remainder + redex nodes"), edges: FiniteFunction::::identity(copied_edges) .inject0(left_edges) .coproduct(&rule.left_map.edges.inject1(copied_edges)) - .expect("coproduct id + left edges"), + .expect("cremainder + redex edges"), }; let interface_in_exploded_nodes = From f2429dda0a18633ab6121ba157af898ee845adae Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:41:18 +0100 Subject: [PATCH 23/41] renaming --- src/lax/rewrite.rs | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index a4cfc41..2f15d73 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -4,7 +4,7 @@ use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, S use crate::union_find::UnionFind; struct ExplodedContext { - graph: Hypergraph, + 
remainder_plus_interface: Hypergraph, to_host: NodeEdgeMap, to_remainder_plus_redex: NodeEdgeMap, interface_in_exploded_nodes: FiniteFunction, @@ -63,7 +63,7 @@ pub fn rewrite( } let (complement, q) = exploded - .graph + .remainder_plus_interface .clone() .quotiented_with(quotient_left, quotient_right); @@ -240,7 +240,7 @@ fn exploded_context( FiniteFunction::::identity(rule.apex.nodes.len()).inject1(copied_nodes); ExplodedContext { - graph: remainder.coproduct(&rule.apex), + remainder_plus_interface: remainder.coproduct(&rule.apex), to_host, to_remainder_plus_redex, interface_in_exploded_nodes, @@ -542,7 +542,7 @@ mod tests { expected.new_node("w".to_string()); expected.new_node("w".to_string()); - assert_eq!(exploded.graph, expected); + assert_eq!(exploded.remainder_plus_interface, expected); } #[test] @@ -552,8 +552,14 @@ mod tests { let rule = Span::new(&apex, &left, &right, &left_map, &right_map); let exploded = exploded_context(&g, &rule, &candidate); - let mut f_prime_to_q = vec![None; exploded.to_copied_plus_left.nodes.target()]; - for (src, &f_prime_image) in exploded.to_copied_plus_left.nodes.table.iter().enumerate() { + let mut f_prime_to_q = vec![None; exploded.to_remainder_plus_redex.nodes.target()]; + for (src, &f_prime_image) in exploded + .to_remainder_plus_redex + .nodes + .table + .iter() + .enumerate() + { let q_image = exploded.to_host.nodes.table[src]; match f_prime_to_q[f_prime_image] { Some(existing) => assert_eq!(existing, q_image), @@ -570,7 +576,7 @@ mod tests { let exploded = exploded_context(&g, &rule, &candidate); let fibers = fiber_partition_inputs(&exploded); - let copied_nodes = exploded.graph.nodes.len() - rule.apex.nodes.len(); + let copied_nodes = exploded.remainder_plus_interface.nodes.len() - rule.apex.nodes.len(); let target_fiber = fibers .iter() .find(|fiber| { From c3f4766fe43b86f4ad5b566ddd7c600e672906ea Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 08:58:32 +0100 Subject: [PATCH 24/41] compose NodeEdgeMap --- src/lax/hypergraph.rs | 29 +++++++++++++++++++++++++++++ src/lax/rewrite.rs | 39 ++++++++++++--------------------------- 2 files changed, 41 insertions(+), 27 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index c107bad..6668add 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -1,4 +1,5 @@ use crate::array::vec::{VecArray, VecKind}; +use crate::category::Coproduct; use crate::finite_function::*; use crate::lax::Arrow; @@ -26,6 +27,34 @@ pub struct NodeEdgeMap { pub edges: FiniteFunction, } +impl NodeEdgeMap { + pub fn compose(&self, other: &NodeEdgeMap) -> Self { + NodeEdgeMap { + nodes: self + .nodes + .compose(&other.nodes) + .expect("node map compose"), + edges: self + .edges + .compose(&other.edges) + .expect("edge map compose"), + } + } + + pub fn coproduct(&self, other: &NodeEdgeMap) -> Self { + NodeEdgeMap { + nodes: self + .nodes + .coproduct(&other.nodes) + .expect("node map coproduct"), + edges: self + .edges + .coproduct(&other.edges) + .expect("edge map coproduct"), + } + } +} + #[derive(Clone, Copy)] pub struct Span<'a, O, A> { pub apex: &'a Hypergraph, diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 2f15d73..e58d7fb 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -201,40 +201,25 @@ fn exploded_context( FiniteFunction::::new(VecArray(remainder_node_to_host), host.nodes.len()).unwrap(); let q_remainder_edges = FiniteFunction::::new(VecArray(remainder_edge_to_host), host.edges.len()).unwrap(); - let q_interface_nodes = rule - .left_map - .nodes - 
.compose(&candidate.nodes) - .expect("candidate map left nodes compose"); - let q_interface_edges = rule - .left_map - .edges - .compose(&candidate.edges) - .expect("candidate map left edges compose"); - + let q_interface = rule.left_map.compose(candidate); let to_host = NodeEdgeMap { - nodes: q_remainder_nodes - .coproduct(&q_interface_nodes) - .expect("node coproduct"), - edges: q_remainder_edges - .coproduct(&q_interface_edges) - .expect("edge coproduct"), - }; + nodes: q_remainder_nodes, + edges: q_remainder_edges, + } + .coproduct(&q_interface); let copied_nodes = remainder.nodes.len(); let copied_edges = remainder.edges.len(); let left_nodes = rule.left.nodes.len(); let left_edges = rule.left.edges.len(); let to_remainder_plus_redex = NodeEdgeMap { - nodes: FiniteFunction::::identity(copied_nodes) - .inject0(left_nodes) - .coproduct(&rule.left_map.nodes.inject1(copied_nodes)) - .expect("remainder + redex nodes"), - edges: FiniteFunction::::identity(copied_edges) - .inject0(left_edges) - .coproduct(&rule.left_map.edges.inject1(copied_edges)) - .expect("cremainder + redex edges"), - }; + nodes: FiniteFunction::::identity(copied_nodes).inject0(left_nodes), + edges: FiniteFunction::::identity(copied_edges).inject0(left_edges), + } + .coproduct(&NodeEdgeMap { + nodes: rule.left_map.nodes.inject1(copied_nodes), + edges: rule.left_map.edges.inject1(copied_edges), + }); let interface_in_exploded_nodes = FiniteFunction::::identity(rule.apex.nodes.len()).inject1(copied_nodes); From 8c85893cbf53c7ed42dc907daa78a158547370cb Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 09:01:02 +0100 Subject: [PATCH 25/41] renaming --- src/lax/rewrite.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index e58d7fb..61e08d8 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -135,17 +135,17 @@ pub fn rewrite( fn exploded_context( host: &Hypergraph, rule: &Span<'_, O, A>, - candidate: &NodeEdgeMap, + matching: &NodeEdgeMap, ) -> ExplodedContext { let mut in_image_nodes = vec![false; host.nodes.len()]; - for i in 0..candidate.nodes.source() { - let idx = candidate.nodes.table[i]; + for i in 0..matching.nodes.source() { + let idx = matching.nodes.table[i]; in_image_nodes[idx] = true; } let mut in_image_edges = vec![false; host.edges.len()]; - for i in 0..candidate.edges.source() { - let idx = candidate.edges.table[i]; + for i in 0..matching.edges.source() { + let idx = matching.edges.table[i]; in_image_edges[idx] = true; } @@ -201,7 +201,7 @@ fn exploded_context( FiniteFunction::::new(VecArray(remainder_node_to_host), host.nodes.len()).unwrap(); let q_remainder_edges = FiniteFunction::::new(VecArray(remainder_edge_to_host), host.edges.len()).unwrap(); - let q_interface = rule.left_map.compose(candidate); + let q_interface = rule.left_map.compose(matching); let to_host = NodeEdgeMap { nodes: q_remainder_nodes, edges: q_remainder_edges, From d348f1af9f34c350f14521e9a8a9dab2c82d0015 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 09:11:06 +0100 Subject: [PATCH 26/41] add coproduct_with_injections --- src/lax/hypergraph.rs | 20 ++++++++++++++++++++ src/lax/rewrite.rs | 33 +++++++++++++-------------------- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 6668add..0c048cc 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -514,6 +514,26 @@ pub(crate) fn concat(v1: &[T], v2: &[T]) -> Vec { } impl Hypergraph { + pub(crate) fn 
coproduct_with_injections( + &self, + other: &Hypergraph, + ) -> (Hypergraph, NodeEdgeMap, NodeEdgeMap) { + let coproduct = self.coproduct(other); + let left = NodeEdgeMap { + nodes: FiniteFunction::::identity(self.nodes.len()) + .inject0(other.nodes.len()), + edges: FiniteFunction::::identity(self.edges.len()) + .inject0(other.edges.len()), + }; + let right = NodeEdgeMap { + nodes: FiniteFunction::::identity(other.nodes.len()) + .inject1(self.nodes.len()), + edges: FiniteFunction::::identity(other.edges.len()) + .inject1(self.edges.len()), + }; + (coproduct, left, right) + } + pub(crate) fn coproduct(&self, other: &Hypergraph) -> Hypergraph { let n = self.nodes.len(); diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 61e08d8..39429d5 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -7,12 +7,12 @@ struct ExplodedContext { remainder_plus_interface: Hypergraph, to_host: NodeEdgeMap, to_remainder_plus_redex: NodeEdgeMap, - interface_in_exploded_nodes: FiniteFunction, + interface_in_exploded: NodeEdgeMap, } impl ExplodedContext { - fn interface_in_exploded_nodes(&self) -> &FiniteFunction { - &self.interface_in_exploded_nodes + fn interface_in_exploded(&self) -> &NodeEdgeMap { + &self.interface_in_exploded } } @@ -69,7 +69,8 @@ pub fn rewrite( let interface_to_complement = { let nodes = exploded - .interface_in_exploded_nodes() + .interface_in_exploded() + .nodes .compose(&q) .expect("interface to complement map"); NodeEdgeMap { @@ -208,27 +209,19 @@ fn exploded_context( } .coproduct(&q_interface); - let copied_nodes = remainder.nodes.len(); - let copied_edges = remainder.edges.len(); - let left_nodes = rule.left.nodes.len(); - let left_edges = rule.left.edges.len(); - let to_remainder_plus_redex = NodeEdgeMap { - nodes: FiniteFunction::::identity(copied_nodes).inject0(left_nodes), - edges: FiniteFunction::::identity(copied_edges).inject0(left_edges), - } - .coproduct(&NodeEdgeMap { - nodes: rule.left_map.nodes.inject1(copied_nodes), - edges: rule.left_map.edges.inject1(copied_edges), - }); + let (_remainder_plus_redex, remainder_in_remainder_plus_redex, redex_in_remainder_plus_redex) = + remainder.coproduct_with_injections(rule.left); + let to_remainder_plus_redex = remainder_in_remainder_plus_redex + .coproduct(&rule.left_map.compose(&redex_in_remainder_plus_redex)); - let interface_in_exploded_nodes = - FiniteFunction::::identity(rule.apex.nodes.len()).inject1(copied_nodes); + let (remainder_plus_interface, _remainder_in_exploded, interface_in_exploded) = + remainder.coproduct_with_injections(rule.apex); ExplodedContext { - remainder_plus_interface: remainder.coproduct(&rule.apex), + remainder_plus_interface, to_host, to_remainder_plus_redex, - interface_in_exploded_nodes, + interface_in_exploded, } } From 6bc03ea41fe8fb51a578e07a3ca2d28b32ac7d94 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 09:43:32 +0100 Subject: [PATCH 27/41] add remainder_with_interface --- src/lax/hypergraph.rs | 92 +++++++++++++++++++++++++++++++++++++++++++ src/lax/rewrite.rs | 71 +-------------------------------- 2 files changed, 94 insertions(+), 69 deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 0c048cc..f3e0fdb 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -514,6 +514,98 @@ pub(crate) fn concat(v1: &[T], v2: &[T]) -> Vec { } impl Hypergraph { + pub fn remainder_with_injection( + &self, + excluded: &NodeEdgeMap, + ) -> (Hypergraph, NodeEdgeMap) { + let mut in_image_nodes = vec![false; self.nodes.len()]; + for &idx in 
excluded.nodes.table.iter() { + if idx >= self.nodes.len() { + panic!( + "excluded node index out of range: got {}, max {}", + idx, + self.nodes.len() + ); + } + in_image_nodes[idx] = true; + } + + let mut in_image_edges = vec![false; self.edges.len()]; + for &idx in excluded.edges.table.iter() { + if idx >= self.edges.len() { + panic!( + "excluded edge index out of range: got {}, max {}", + idx, + self.edges.len() + ); + } + in_image_edges[idx] = true; + } + + let mut remainder = Hypergraph::empty(); + let mut remainder_node_to_host = Vec::new(); + let mut node_map: Vec> = vec![None; self.nodes.len()]; + for (idx, label) in self.nodes.iter().enumerate() { + if in_image_nodes[idx] { + continue; + } + let new_id = remainder.new_node(label.clone()); + remainder_node_to_host.push(idx); + node_map[idx] = Some(new_id.0); + } + + let mut remainder_edge_to_host = Vec::new(); + for (edge_id, edge) in self.adjacency.iter().enumerate() { + if in_image_edges[edge_id] { + continue; + } + + let mut sources = Vec::with_capacity(edge.sources.len()); + for node in &edge.sources { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => { + let new_id = remainder.new_node(self.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); + new_id + } + }; + sources.push(new_id); + } + + let mut targets = Vec::with_capacity(edge.targets.len()); + for node in &edge.targets { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => { + let new_id = remainder.new_node(self.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); + new_id + } + }; + targets.push(new_id); + } + + remainder.new_edge(self.edges[edge_id].clone(), Hyperedge { sources, targets }); + remainder_edge_to_host.push(edge_id); + } + + let remainder_in_host = NodeEdgeMap { + nodes: FiniteFunction::::new( + VecArray(remainder_node_to_host), + self.nodes.len(), + ) + .expect("remainder node injection"), + edges: FiniteFunction::::new( + VecArray(remainder_edge_to_host), + self.edges.len(), + ) + .expect("remainder edge injection"), + }; + + (remainder, remainder_in_host) + } + pub(crate) fn coproduct_with_injections( &self, other: &Hypergraph, diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 39429d5..c918258 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -138,76 +138,9 @@ fn exploded_context( rule: &Span<'_, O, A>, matching: &NodeEdgeMap, ) -> ExplodedContext { - let mut in_image_nodes = vec![false; host.nodes.len()]; - for i in 0..matching.nodes.source() { - let idx = matching.nodes.table[i]; - in_image_nodes[idx] = true; - } - - let mut in_image_edges = vec![false; host.edges.len()]; - for i in 0..matching.edges.source() { - let idx = matching.edges.table[i]; - in_image_edges[idx] = true; - } - - let mut remainder = Hypergraph::empty(); - let mut remainder_node_to_host = Vec::new(); - let mut node_map: Vec> = vec![None; host.nodes.len()]; - for (idx, label) in host.nodes.iter().enumerate() { - if in_image_nodes[idx] { - continue; - } - let new_id = remainder.new_node(label.clone()); - remainder_node_to_host.push(idx); - node_map[idx] = Some(new_id.0); - } - - let mut remainder_edge_to_host = Vec::new(); - for (edge_id, edge) in host.adjacency.iter().enumerate() { - if in_image_edges[edge_id] { - continue; - } - - let mut sources = Vec::with_capacity(edge.sources.len()); - for node in &edge.sources { - let new_id = match node_map[node.0] { - Some(existing) => NodeId(existing), - None => { - let new_id = remainder.new_node(host.nodes[node.0].clone()); - 
remainder_node_to_host.push(node.0); - new_id - } - }; - sources.push(new_id); - } - - let mut targets = Vec::with_capacity(edge.targets.len()); - for node in &edge.targets { - let new_id = match node_map[node.0] { - Some(existing) => NodeId(existing), - None => { - let new_id = remainder.new_node(host.nodes[node.0].clone()); - remainder_node_to_host.push(node.0); - new_id - } - }; - targets.push(new_id); - } - - remainder.new_edge(host.edges[edge_id].clone(), Hyperedge { sources, targets }); - remainder_edge_to_host.push(edge_id); - } - - let q_remainder_nodes = - FiniteFunction::::new(VecArray(remainder_node_to_host), host.nodes.len()).unwrap(); - let q_remainder_edges = - FiniteFunction::::new(VecArray(remainder_edge_to_host), host.edges.len()).unwrap(); + let (remainder, remainder_in_host) = host.remainder_with_injection(matching); let q_interface = rule.left_map.compose(matching); - let to_host = NodeEdgeMap { - nodes: q_remainder_nodes, - edges: q_remainder_edges, - } - .coproduct(&q_interface); + let to_host = remainder_in_host.coproduct(&q_interface); let (_remainder_plus_redex, remainder_in_remainder_plus_redex, redex_in_remainder_plus_redex) = remainder.coproduct_with_injections(rule.left); From 8462044861898cf240a0cd57eadaf61836b8a8e9 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 17:03:56 +0100 Subject: [PATCH 28/41] move partitions --- src/lax/rewrite.rs | 133 ++++++------------------------------ src/lib.rs | 1 + src/partition.rs | 166 +++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 187 insertions(+), 113 deletions(-) create mode 100644 src/partition.rs diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index c918258..582daa5 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -1,7 +1,7 @@ -use crate::array::vec::{VecArray, VecKind}; +use crate::array::vec::VecKind; use crate::finite_function::FiniteFunction; -use crate::lax::{Arrow, Coproduct, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; -use crate::union_find::UnionFind; +use crate::lax::{Arrow, Coproduct as _, Hypergraph, NodeEdgeMap, NodeId, Span}; +use crate::partition::{enumerate_partitions, Partition, PartitionInput}; struct ExplodedContext { remainder_plus_interface: Hypergraph, @@ -28,10 +28,8 @@ pub fn rewrite( } let exploded = exploded_context(g, rule, candidate); let fiber_inputs = fiber_partition_inputs(&exploded); - let partitions_per_fiber: Vec> = fiber_inputs - .iter() - .map(enumerate_fiber_partitions) - .collect(); + let partitions_per_fiber: Vec>> = + fiber_inputs.iter().map(enumerate_partitions).collect(); if partitions_per_fiber.is_empty() { return vec![pushout_result(&exploded, rule, &partitions_per_fiber, &[])]; @@ -43,7 +41,7 @@ pub fn rewrite( fn pushout_result( exploded: &ExplodedContext, rule: &Span<'_, O, A>, - partitions_per_fiber: &[Vec], + partitions_per_fiber: &[Vec>], selection: &[usize], ) -> Hypergraph { let mut quotient_left = Vec::new(); @@ -52,7 +50,7 @@ pub fn rewrite( for (fiber_idx, &partition_idx) in selection.iter().enumerate() { let partition = &partitions_per_fiber[fiber_idx][partition_idx]; for block in &partition.blocks { - let Some((first, rest)) = block.nodes.split_first() else { + let Some((first, rest)) = block.elements.split_first() else { continue; }; for node in rest { @@ -93,7 +91,7 @@ pub fn rewrite( idx: usize, exploded: &ExplodedContext, rule: &Span<'_, O, A>, - partitions_per_fiber: &[Vec], + partitions_per_fiber: &[Vec>], selection: &mut Vec, results: &mut Vec>, ) { @@ -158,7 +156,7 @@ fn exploded_context( } } -fn 
fiber_partition_inputs(exploded: &ExplodedContext) -> Vec { +fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec> { let mut fibers = vec![Vec::new(); exploded.to_host.nodes.target()]; for (src, &tgt) in exploded.to_host.nodes.table.iter().enumerate() { fibers[tgt].push(NodeId(src)); @@ -186,8 +184,8 @@ fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec(exploded: &ExplodedContext) -> Vec Vec { - let mut results = Vec::new(); - let mut blocks: Vec = Vec::new(); - let mut uf = UnionFind::new(fiber.class_count); - - fn all_connected(uf: &UnionFind) -> bool { - uf.components() == 1 - } - - fn walk( - idx: usize, - fiber: &FiberPartitionInput, - blocks: &mut Vec, - uf: &mut UnionFind, - results: &mut Vec, - ) { - if idx == fiber.nodes.len() { - if all_connected(uf) { - let blocks = blocks - .iter() - .map(|b| FiberBlock { - nodes: b.nodes.clone(), - }) - .collect(); - results.push(FiberPartition { blocks }); - } - return; - } - - let node = fiber.nodes[idx]; - let class_id = fiber.class_ids[idx]; - - for i in 0..blocks.len() { - let snap = uf.snapshot(); - let (nodes_len, classes_len) = { - let block = &mut blocks[i]; - let nodes_len = block.nodes.len(); - let classes_len = block.classes.len(); - - block.nodes.push(node); - if !block.classes.contains(&class_id) { - if let Some(&rep) = block.classes.first() { - uf.union(rep, class_id); - } - block.classes.push(class_id); - } - - (nodes_len, classes_len) - }; - - walk(idx + 1, fiber, blocks, uf, results); - - uf.rollback(snap); - let block = &mut blocks[i]; - block.nodes.truncate(nodes_len); - block.classes.truncate(classes_len); - } - - blocks.push(BlockState { - nodes: vec![node], - classes: vec![class_id], - }); - walk(idx + 1, fiber, blocks, uf, results); - blocks.pop(); - } - - walk(0, fiber, &mut blocks, &mut uf, &mut results); - results -} - -struct FiberPartitionInput { - nodes: Vec, - class_ids: Vec, - class_count: usize, -} - -struct FiberPartition { - blocks: Vec, -} - -struct FiberBlock { - nodes: Vec, -} - -struct BlockState { - nodes: Vec, - classes: Vec, -} - fn validate_candidate_map( rule: &Span<'_, O, A>, g: &Hypergraph, @@ -393,13 +302,11 @@ mod tests { // Bonchi, Filippo, et al. // "String diagram rewrite theory I: Rewriting with Frobenius structure." // Journal of the ACM (JACM) 69.2 (2022): 1-58. 
- use super::{ - enumerate_fiber_partitions, exploded_context, fiber_partition_inputs, rewrite, - FiberPartition, - }; + use super::{exploded_context, fiber_partition_inputs, rewrite}; use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; - use crate::lax::{Arrow, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; + use crate::lax::{Arrow as _, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; + use crate::partition::{enumerate_partitions, Partition}; use std::collections::HashMap; fn empty_map(target: usize) -> FiniteFunction { @@ -492,7 +399,7 @@ mod tests { .iter() .find(|fiber| { let apex_count = fiber - .nodes + .elements .iter() .filter(|node| node.0 >= copied_nodes) .count(); @@ -501,7 +408,7 @@ mod tests { .expect("expected fiber containing apex nodes"); let mut apex_nodes = target_fiber - .nodes + .elements .iter() .cloned() .filter(|node| node.0 >= copied_nodes) @@ -509,7 +416,7 @@ mod tests { apex_nodes.sort_by_key(|node| node.0); let mut w4_nodes = target_fiber - .nodes + .elements .iter() .cloned() .filter(|node| node.0 < copied_nodes) @@ -522,7 +429,7 @@ mod tests { name_map.insert(apex_nodes[0], "k0"); name_map.insert(apex_nodes[1], "k1"); - let partitions = enumerate_fiber_partitions(target_fiber); + let partitions = enumerate_partitions(target_fiber); let mut actual = partitions .iter() .map(|partition| normalize_partition(partition, &name_map)) @@ -814,7 +721,7 @@ mod tests { } fn normalize_partition( - partition: &FiberPartition, + partition: &Partition, name_map: &HashMap, ) -> Vec> { let mut blocks = partition @@ -822,7 +729,7 @@ mod tests { .iter() .map(|block| { let mut names = block - .nodes + .elements .iter() .map(|node| *name_map.get(node).expect("name map")) .collect::>(); diff --git a/src/lib.rs b/src/lib.rs index 06a74b9..76878e6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -259,6 +259,7 @@ pub mod category; pub mod finite_function; pub mod indexed_coproduct; pub mod operations; +pub mod partition; pub mod semifinite; pub mod union_find; diff --git a/src/partition.rs b/src/partition.rs new file mode 100644 index 0000000..114f657 --- /dev/null +++ b/src/partition.rs @@ -0,0 +1,166 @@ +use crate::union_find::UnionFind; + +pub struct PartitionInput { + pub elements: Vec, + // class_ids identify the class for each element. + // We search for partitions whose induced relation, together with the class relation, + // generates the total relation (i.e. everything becomes connected). + // We keep per-block class lists so we only union each class once per block, + // avoiding redundant unions when multiple elements in the same class land together. 
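    // Worked illustration (it mirrors `partitions_three_elements_two_classes` in the tests
    // below): elements [0, 1, 2] with class_ids [0, 0, 1] and class_count 2 admit exactly
    // three partitions, namely {0,1,2}, {0,2}|{1} and {1,2}|{0}; the partitions {0,1}|{2}
    // and {0}|{1}|{2} are rejected because none of their blocks joins class 0 to class 1.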
+ pub class_ids: Vec, + pub class_count: usize, +} + +pub struct Partition { + pub blocks: Vec>, +} + +pub struct PartitionBlock { + pub elements: Vec, +} + +struct BlockState { + elements: Vec, + classes: Vec, +} + +pub fn enumerate_partitions(input: &PartitionInput) -> Vec> { + let mut results = Vec::new(); + let mut blocks: Vec> = Vec::new(); + let mut uf = UnionFind::new(input.class_count); + + fn all_connected(uf: &UnionFind) -> bool { + uf.components() == 1 + } + + fn walk( + idx: usize, + input: &PartitionInput, + blocks: &mut Vec>, + uf: &mut UnionFind, + results: &mut Vec>, + ) { + if idx == input.elements.len() { + if all_connected(uf) { + let blocks = blocks + .iter() + .map(|b| PartitionBlock { + elements: b.elements.clone(), + }) + .collect(); + results.push(Partition { blocks }); + } + return; + } + + let element = input.elements[idx].clone(); + let class_id = input.class_ids[idx]; + + for i in 0..blocks.len() { + let snap = uf.snapshot(); + let (elements_len, classes_len) = { + let block = &mut blocks[i]; + let elements_len = block.elements.len(); + let classes_len = block.classes.len(); + + block.elements.push(element.clone()); + if !block.classes.contains(&class_id) { + if let Some(&rep) = block.classes.first() { + uf.union(rep, class_id); + } + block.classes.push(class_id); + } + + (elements_len, classes_len) + }; + + walk(idx + 1, input, blocks, uf, results); + + uf.rollback(snap); + let block = &mut blocks[i]; + block.elements.truncate(elements_len); + block.classes.truncate(classes_len); + } + + blocks.push(BlockState { + elements: vec![element], + classes: vec![class_id], + }); + walk(idx + 1, input, blocks, uf, results); + blocks.pop(); + } + + walk(0, input, &mut blocks, &mut uf, &mut results); + results +} + +#[cfg(test)] +mod tests { + use super::{enumerate_partitions, PartitionInput}; + + fn normalize(partition: &super::Partition) -> Vec> { + let mut blocks = partition + .blocks + .iter() + .map(|block| { + let mut elems = block.elements.clone(); + elems.sort(); + elems + }) + .collect::>(); + blocks.sort(); + blocks + } + + #[test] + fn partitions_single_class_allows_all_partitions() { + let input = PartitionInput { + elements: vec![0, 1], + class_ids: vec![0, 0], + class_count: 1, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 2); + } + + #[test] + fn partitions_two_classes_require_connection() { + let input = PartitionInput { + elements: vec![0, 1], + class_ids: vec![0, 1], + class_count: 2, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 1); + assert_eq!(normalize(&partitions[0]), vec![vec![0, 1]]); + } + + #[test] + fn partitions_three_elements_two_classes() { + let input = PartitionInput { + elements: vec![0, 1, 2], + class_ids: vec![0, 0, 1], + class_count: 2, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 3); + + let mut actual = partitions + .iter() + .map(normalize) + .collect::>(); + actual.sort(); + + let mut expected = vec![ + vec![vec![0, 1, 2]], + vec![vec![0, 2], vec![1]], + vec![vec![1, 2], vec![0]], + ]; + expected.sort(); + + assert_eq!(actual, expected); + } +} From 743c6be37392bc0c6d6bc351c3b5ae011882872b Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 17:59:32 +0100 Subject: [PATCH 29/41] more tests --- src/lax/hypergraph.rs | 22 +++------ src/partition.rs | 15 +++++- src/union_find.rs | 29 +++++++++++ tests/lax/hypergraph.rs | 105 +++++++++++++++++++++++++++++++++++++++- 4 files changed, 153 insertions(+), 18 
deletions(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index f3e0fdb..b6873e7 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -30,14 +30,8 @@ pub struct NodeEdgeMap { impl NodeEdgeMap { pub fn compose(&self, other: &NodeEdgeMap) -> Self { NodeEdgeMap { - nodes: self - .nodes - .compose(&other.nodes) - .expect("node map compose"), - edges: self - .edges - .compose(&other.edges) - .expect("edge map compose"), + nodes: self.nodes.compose(&other.nodes).expect("node map compose"), + edges: self.edges.compose(&other.edges).expect("edge map compose"), } } @@ -612,16 +606,12 @@ impl Hypergraph { ) -> (Hypergraph, NodeEdgeMap, NodeEdgeMap) { let coproduct = self.coproduct(other); let left = NodeEdgeMap { - nodes: FiniteFunction::::identity(self.nodes.len()) - .inject0(other.nodes.len()), - edges: FiniteFunction::::identity(self.edges.len()) - .inject0(other.edges.len()), + nodes: FiniteFunction::::identity(self.nodes.len()).inject0(other.nodes.len()), + edges: FiniteFunction::::identity(self.edges.len()).inject0(other.edges.len()), }; let right = NodeEdgeMap { - nodes: FiniteFunction::::identity(other.nodes.len()) - .inject1(self.nodes.len()), - edges: FiniteFunction::::identity(other.edges.len()) - .inject1(self.edges.len()), + nodes: FiniteFunction::::identity(other.nodes.len()).inject1(self.nodes.len()), + edges: FiniteFunction::::identity(other.edges.len()).inject1(self.edges.len()), }; (coproduct, left, right) } diff --git a/src/partition.rs b/src/partition.rs index 114f657..15b65ff 100644 --- a/src/partition.rs +++ b/src/partition.rs @@ -158,7 +158,20 @@ mod tests { vec![vec![0, 1, 2]], vec![vec![0, 2], vec![1]], vec![vec![1, 2], vec![0]], - ]; + ] + .into_iter() + .map(|blocks| { + let mut blocks = blocks + .into_iter() + .map(|mut block| { + block.sort(); + block + }) + .collect::>(); + blocks.sort(); + blocks + }) + .collect::>(); expected.sort(); assert_eq!(actual, expected); diff --git a/src/union_find.rs b/src/union_find.rs index ce3a0df..313ba8f 100644 --- a/src/union_find.rs +++ b/src/union_find.rs @@ -88,3 +88,32 @@ impl UnionFind { } } } + +#[cfg(test)] +mod tests { + use super::UnionFind; + + #[test] + fn union_find_unions_and_connects() { + let mut uf = UnionFind::new(4); + assert_eq!(uf.components(), 4); + uf.union(0, 1); + uf.union(2, 3); + assert_eq!(uf.components(), 2); + uf.union(1, 2); + assert_eq!(uf.components(), 1); + } + + #[test] + fn union_find_snapshot_and_rollback() { + let mut uf = UnionFind::new(3); + uf.union(0, 1); + let snap = uf.snapshot(); + uf.union(1, 2); + assert_eq!(uf.components(), 1); + uf.rollback(snap); + assert_eq!(uf.components(), 2); + uf.union(0, 2); + assert_eq!(uf.components(), 1); + } +} diff --git a/tests/lax/hypergraph.rs b/tests/lax/hypergraph.rs index b3a5cba..662d2b6 100644 --- a/tests/lax/hypergraph.rs +++ b/tests/lax/hypergraph.rs @@ -1,4 +1,6 @@ -use open_hypergraphs::lax::{Hyperedge, Hypergraph, NodeId}; +use open_hypergraphs::array::vec::{VecArray, VecKind}; +use open_hypergraphs::finite_function::FiniteFunction; +use open_hypergraphs::lax::{Coproduct as _, Hyperedge, Hypergraph, NodeEdgeMap, NodeId}; #[test] fn test_delete_nodes_remap_and_quotient() { @@ -82,3 +84,104 @@ fn test_delete_nodes_ignores_out_of_range() { assert_eq!(h.quotient.0, vec![NodeId(0)]); assert_eq!(h.quotient.1, vec![NodeId(1)]); } + +#[test] +fn test_remainder_with_injection_splits_excluded_node_occurrences() { + let mut host = Hypergraph::empty(); + let w = host.new_node(1); + host.new_edge( + 10, + Hyperedge { + sources: 
vec![w], + targets: vec![w], + }, + ); + host.new_edge( + 20, + Hyperedge { + sources: vec![w], + targets: vec![w], + }, + ); + + let excluded = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![w.0]), host.nodes.len()).unwrap(), + edges: FiniteFunction::::initial(host.edges.len()), + }; + + let (remainder, remainder_in_host) = host.remainder_with_injection(&excluded); + + assert_eq!(remainder.nodes.len(), 4); + assert_eq!(remainder.edges.len(), 2); + assert_eq!(remainder.adjacency.len(), 2); + assert_eq!( + remainder_in_host.nodes.table, + VecArray(vec![w.0, w.0, w.0, w.0]) + ); + assert_eq!(remainder.adjacency[0].sources.len(), 1); + assert_eq!(remainder.adjacency[0].targets.len(), 1); + assert_eq!(remainder.adjacency[1].sources.len(), 1); + assert_eq!(remainder.adjacency[1].targets.len(), 1); + let a0s = remainder.adjacency[0].sources[0]; + let a0t = remainder.adjacency[0].targets[0]; + let a1s = remainder.adjacency[1].sources[0]; + let a1t = remainder.adjacency[1].targets[0]; + assert_ne!(a0s, a0t); + assert_ne!(a1s, a1t); + assert_ne!(a0s, a1s); + assert_ne!(a0s, a1t); + assert_ne!(a0t, a1s); + assert_ne!(a0t, a1t); +} + +#[test] +fn test_remainder_with_injection_empty_excluded_is_identity() { + let mut host = Hypergraph::empty(); + let w0 = host.new_node(1); + let w1 = host.new_node(2); + host.new_edge( + 10, + Hyperedge { + sources: vec![w0], + targets: vec![w1], + }, + ); + + let excluded = NodeEdgeMap { + nodes: FiniteFunction::::initial(host.nodes.len()), + edges: FiniteFunction::::initial(host.edges.len()), + }; + + let (remainder, remainder_in_host) = host.remainder_with_injection(&excluded); + + assert_eq!(remainder, host); + assert_eq!(remainder_in_host.nodes.table, VecArray(vec![0, 1])); + assert_eq!(remainder_in_host.edges.table, VecArray(vec![0])); +} + +#[test] +fn test_remainder_with_injection_excluded_edge_drops_edge_only() { + let mut host = Hypergraph::empty(); + let w0 = host.new_node(1); + let w1 = host.new_node(2); + host.new_edge( + 10, + Hyperedge { + sources: vec![w0], + targets: vec![w1], + }, + ); + + let excluded = NodeEdgeMap { + nodes: FiniteFunction::::initial(host.nodes.len()), + edges: FiniteFunction::::new(VecArray(vec![0]), host.edges.len()).unwrap(), + }; + + let (remainder, remainder_in_host) = host.remainder_with_injection(&excluded); + + assert_eq!(remainder.nodes, host.nodes); + assert!(remainder.edges.is_empty()); + assert!(remainder.adjacency.is_empty()); + assert_eq!(remainder_in_host.nodes.table, VecArray(vec![0, 1])); + assert!(remainder_in_host.edges.table.is_empty()); +} From c7ccd55afc860965acd32fd1eb37f3e6263992bc Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 18:00:00 +0100 Subject: [PATCH 30/41] format --- src/partition.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/partition.rs b/src/partition.rs index 15b65ff..4b8204d 100644 --- a/src/partition.rs +++ b/src/partition.rs @@ -148,10 +148,7 @@ mod tests { let partitions = enumerate_partitions(&input); assert_eq!(partitions.len(), 3); - let mut actual = partitions - .iter() - .map(normalize) - .collect::>(); + let mut actual = partitions.iter().map(normalize).collect::>(); actual.sort(); let mut expected = vec![ From 95a58279f510c5083bf078f15f255a10fc625709 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 19:31:01 +0100 Subject: [PATCH 31/41] format --- tests/lax/hypergraph.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lax/hypergraph.rs b/tests/lax/hypergraph.rs index e5d81a1..bfa9de8 100644 --- 
a/tests/lax/hypergraph.rs +++ b/tests/lax/hypergraph.rs @@ -1,6 +1,6 @@ use open_hypergraphs::array::vec::{VecArray, VecKind}; use open_hypergraphs::finite_function::FiniteFunction; -use open_hypergraphs::lax::{Coproduct as _, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, EdgeId}; +use open_hypergraphs::lax::{Coproduct as _, EdgeId, Hyperedge, Hypergraph, NodeEdgeMap, NodeId}; #[test] fn test_delete_nodes_remap_and_quotient() { From 1a40aeef32a112b5908373c1035d308dc5453bb2 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 19:59:35 +0100 Subject: [PATCH 32/41] add test --- src/lax/rewrite.rs | 85 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 80 insertions(+), 5 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 582daa5..74906d9 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -298,10 +298,9 @@ fn dangling_condition( #[cfg(test)] mod tests { - // Tests from working examples in - // Bonchi, Filippo, et al. - // "String diagram rewrite theory I: Rewriting with Frobenius structure." - // Journal of the ACM (JACM) 69.2 (2022): 1-58. + // Examples from + // 1. Bonchi, Filippo, et al. "String diagram rewrite theory I: Rewriting with Frobenius structure." Journal of the ACM (JACM) 69.2 (2022): 1-58. + // 2. Heumüller, Marvin, et al. "Construction of pushout complements in the category of hypergraphs." Electronic Communications of the EASST 39 (2011). use super::{exploded_context, fiber_partition_inputs, rewrite}; use crate::array::vec::{VecArray, VecKind}; use crate::finite_function::FiniteFunction; @@ -720,6 +719,82 @@ mod tests { } } + #[test] + fn test_rewrite_pushout_complement_split_match_node() { + // [2] Example 1 + let mut host: Hypergraph = Hypergraph::empty(); + let f_node = host.new_node("w".to_string()); + let u_node = host.new_node("w".to_string()); + host.new_edge( + "e".to_string(), + Hyperedge { + sources: vec![u_node], + targets: vec![u_node], + }, + ); + host.new_edge( + "f".to_string(), + Hyperedge { + sources: vec![f_node], + targets: vec![f_node], + }, + ); + + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + + let mut left: Hypergraph = Hypergraph::empty(); + left.new_node("w".to_string()); + left.new_node("w".to_string()); + + let right = apex.clone(); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0, 1, 1]), left.nodes.len()) + .unwrap(), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1, 2, 3]), right.nodes.len()) + .unwrap(), + edges: empty_map(right.edges.len()), + }; + + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new( + VecArray(vec![f_node.0, f_node.0]), + host.nodes.len(), + ) + .unwrap(), + edges: empty_map(host.edges.len()), + }; + + let complements = rewrite(&host, &rule, &candidate); + assert!(complements.len() == 61, "Found {}", complements.len()); + + let mut has_split_loop = false; + for complement in &complements { + assert_eq!(complement.edges.len(), 2); + let f_idx = complement + .edges + .iter() + .position(|label| label == "f") + .expect("f edge exists"); + let f_edge = &complement.adjacency[f_idx]; + assert_eq!(f_edge.sources.len(), 1); + assert_eq!(f_edge.targets.len(), 1); + if f_edge.sources[0] != f_edge.targets[0] { + has_split_loop = true; + } + } + + 
assert!(has_split_loop); + } + fn normalize_partition( partition: &Partition, name_map: &HashMap, @@ -752,7 +827,7 @@ mod tests { NodeEdgeMap, NodeEdgeMap, ) { - // From Session 4.5 Pushout Complements and Rewriting Modulo Frobenius + // [1] Session 4.5 Pushout Complements and Rewriting Modulo Frobenius let f_label = "f".to_string(); let g_label = "g".to_string(); From d50e48742a0edd2bf042a9cd2d44b3feff8bc484 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 12 Jan 2026 20:02:14 +0100 Subject: [PATCH 33/41] refactor --- src/lax/rewrite.rs | 113 +++++++++++++++++++++++++-------------------- 1 file changed, 64 insertions(+), 49 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 74906d9..125e62d 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -722,56 +722,9 @@ mod tests { #[test] fn test_rewrite_pushout_complement_split_match_node() { // [2] Example 1 - let mut host: Hypergraph = Hypergraph::empty(); - let f_node = host.new_node("w".to_string()); - let u_node = host.new_node("w".to_string()); - host.new_edge( - "e".to_string(), - Hyperedge { - sources: vec![u_node], - targets: vec![u_node], - }, - ); - host.new_edge( - "f".to_string(), - Hyperedge { - sources: vec![f_node], - targets: vec![f_node], - }, - ); - - let mut apex: Hypergraph = Hypergraph::empty(); - apex.new_node("w".to_string()); - apex.new_node("w".to_string()); - apex.new_node("w".to_string()); - apex.new_node("w".to_string()); - - let mut left: Hypergraph = Hypergraph::empty(); - left.new_node("w".to_string()); - left.new_node("w".to_string()); - - let right = apex.clone(); - - let left_map = NodeEdgeMap { - nodes: FiniteFunction::::new(VecArray(vec![0, 0, 1, 1]), left.nodes.len()) - .unwrap(), - edges: empty_map(left.edges.len()), - }; - let right_map = NodeEdgeMap { - nodes: FiniteFunction::::new(VecArray(vec![0, 1, 2, 3]), right.nodes.len()) - .unwrap(), - edges: empty_map(right.edges.len()), - }; - + let (host, apex, left, right, left_map, right_map, candidate) = + example_rewrite_pushout_complement_split_match_node_input(); let rule = Span::new(&apex, &left, &right, &left_map, &right_map); - let candidate = NodeEdgeMap { - nodes: FiniteFunction::::new( - VecArray(vec![f_node.0, f_node.0]), - host.nodes.len(), - ) - .unwrap(), - edges: empty_map(host.edges.len()), - }; let complements = rewrite(&host, &rule, &candidate); assert!(complements.len() == 61, "Found {}", complements.len()); @@ -910,4 +863,66 @@ mod tests { f_label, g_label, g, apex, left, right, left_map, right_map, candidate, ) } + + fn example_rewrite_pushout_complement_split_match_node_input() -> ( + Hypergraph, + Hypergraph, + Hypergraph, + Hypergraph, + NodeEdgeMap, + NodeEdgeMap, + NodeEdgeMap, + ) { + // [2] Example 1 + let mut host: Hypergraph = Hypergraph::empty(); + let f_node = host.new_node("w".to_string()); + let u_node = host.new_node("w".to_string()); + host.new_edge( + "e".to_string(), + Hyperedge { + sources: vec![u_node], + targets: vec![u_node], + }, + ); + host.new_edge( + "f".to_string(), + Hyperedge { + sources: vec![f_node], + targets: vec![f_node], + }, + ); + + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + + let mut left: Hypergraph = Hypergraph::empty(); + left.new_node("w".to_string()); + left.new_node("w".to_string()); + + let right = apex.clone(); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0, 1, 1]), left.nodes.len()) + 
.unwrap(), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1, 2, 3]), right.nodes.len()) + .unwrap(), + edges: empty_map(right.edges.len()), + }; + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new( + VecArray(vec![f_node.0, f_node.0]), + host.nodes.len(), + ) + .unwrap(), + edges: empty_map(host.edges.len()), + }; + + (host, apex, left, right, left_map, right_map, candidate) + } } From 256ca402897b0b881172bf590f12f60f30bd4590 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 06:28:38 +0100 Subject: [PATCH 34/41] improve test: ensure all components are found --- src/lax/rewrite.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 125e62d..9436e7c 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -714,9 +714,15 @@ mod tests { let complements = rewrite(&g, &rule, &candidate); assert_eq!(complements.len(), 5); - for expected_graph in expected { - assert!(complements.iter().any(|h| h == &expected_graph)); - } + let missing = expected + .iter() + .filter(|expected_graph| !complements.iter().any(|h| h == *expected_graph)) + .collect::>(); + assert!( + missing.is_empty(), + "Missing {} expected complement(s)", + missing.len() + ); } #[test] From b00d9935ead67466113fc75d6efe56dd2b6e60e3 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 07:02:19 +0100 Subject: [PATCH 35/41] refactor test --- src/lax/rewrite.rs | 81 ++++++++++++++++++---------------------------- 1 file changed, 31 insertions(+), 50 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 9436e7c..a2946a0 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -465,6 +465,32 @@ mod tests { let (f_label, g_label, g, apex, left, right, left_map, right_map, candidate) = example_rewrite_input(); let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let build_expected = |complement: Hypergraph, + q: &FiniteFunction, + k0: NodeId, + k1: NodeId| + -> Hypergraph { + let interface_in_complement = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![k0.0, k1.0]), q.source()) + .unwrap(), + edges: empty_map(complement.edges.len()), + }; + let interface_to_complement = NodeEdgeMap { + nodes: interface_in_complement + .nodes + .compose(q) + .expect("interface to complement map"), + edges: empty_map(complement.edges.len()), + }; + let span = Span::new( + rule.apex, + rule.right, + &complement, + rule.right_map, + &interface_to_complement, + ); + span.pushout() + }; let mut expected = Vec::new(); let mut c1: Hypergraph = Hypergraph::empty(); @@ -505,16 +531,7 @@ mod tests { }, ); let (c1, q1) = c1.quotiented_with(vec![a1, a0], vec![k0, k1]); - let k0_c1 = NodeId(q1.table[k0.0]); - let k1_c1 = NodeId(q1.table[k1.0]); - let mut p1 = rule.right.coproduct(&c1); - let left_nodes = rule.right.nodes.len(); - p1.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); - p1.quotient.1.push(NodeId(k0_c1.0 + left_nodes)); - p1.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); - p1.quotient.1.push(NodeId(k1_c1.0 + left_nodes)); - p1.quotient(); - expected.push(p1); + expected.push(build_expected(c1, &q1, k0, k1)); let mut c2: Hypergraph = Hypergraph::empty(); let w1 = c2.new_node("w".to_string()); @@ -554,16 +571,7 @@ mod tests { }, ); let (c2, q2) = c2.quotiented_with(vec![a1, a0], vec![k1, k0]); - let k0_c2 = NodeId(q2.table[k0.0]); - let k1_c2 = NodeId(q2.table[k1.0]); - let mut p2 = rule.right.coproduct(&c2); - let left_nodes 
= rule.right.nodes.len(); - p2.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); - p2.quotient.1.push(NodeId(k0_c2.0 + left_nodes)); - p2.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); - p2.quotient.1.push(NodeId(k1_c2.0 + left_nodes)); - p2.quotient(); - expected.push(p2); + expected.push(build_expected(c2, &q2, k0, k1)); let mut c3: Hypergraph = Hypergraph::empty(); let w1 = c3.new_node("w".to_string()); @@ -603,16 +611,7 @@ mod tests { }, ); let (c3, q3) = c3.quotiented_with(vec![a0, a0], vec![a1, k1]); - let k0_c3 = NodeId(q3.table[k0.0]); - let k1_c3 = NodeId(q3.table[k1.0]); - let mut p3 = rule.right.coproduct(&c3); - let left_nodes = rule.right.nodes.len(); - p3.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); - p3.quotient.1.push(NodeId(k0_c3.0 + left_nodes)); - p3.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); - p3.quotient.1.push(NodeId(k1_c3.0 + left_nodes)); - p3.quotient(); - expected.push(p3); + expected.push(build_expected(c3, &q3, k0, k1)); let mut c4: Hypergraph = Hypergraph::empty(); let w1 = c4.new_node("w".to_string()); @@ -652,16 +651,7 @@ mod tests { }, ); let (c4, q4) = c4.quotiented_with(vec![a0, a0], vec![a1, k0]); - let k0_c4 = NodeId(q4.table[k0.0]); - let k1_c4 = NodeId(q4.table[k1.0]); - let mut p4 = rule.right.coproduct(&c4); - let left_nodes = rule.right.nodes.len(); - p4.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); - p4.quotient.1.push(NodeId(k0_c4.0 + left_nodes)); - p4.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); - p4.quotient.1.push(NodeId(k1_c4.0 + left_nodes)); - p4.quotient(); - expected.push(p4); + expected.push(build_expected(c4, &q4, k0, k1)); let mut c5: Hypergraph = Hypergraph::empty(); let w1 = c5.new_node("w".to_string()); @@ -701,16 +691,7 @@ mod tests { }, ); let (c5, q5) = c5.quotiented_with(vec![a0, a0, a0], vec![a1, k0, k1]); - let k0_c5 = NodeId(q5.table[k0.0]); - let k1_c5 = NodeId(q5.table[k1.0]); - let mut p5 = rule.right.coproduct(&c5); - let left_nodes = rule.right.nodes.len(); - p5.quotient.0.push(NodeId(rule.right_map.nodes.table[0])); - p5.quotient.1.push(NodeId(k0_c5.0 + left_nodes)); - p5.quotient.0.push(NodeId(rule.right_map.nodes.table[1])); - p5.quotient.1.push(NodeId(k1_c5.0 + left_nodes)); - p5.quotient(); - expected.push(p5); + expected.push(build_expected(c5, &q5, k0, k1)); let complements = rewrite(&g, &rule, &candidate); assert_eq!(complements.len(), 5); From 509621db9ba23102c2892da266f7360338b7940e Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 07:58:00 +0100 Subject: [PATCH 36/41] test util for building fixtures --- src/lax/mod.rs | 3 + src/lax/rewrite.rs | 329 +++++++++++++----------------------------- src/lax/test_utils.rs | 69 +++++++++ 3 files changed, 170 insertions(+), 231 deletions(-) create mode 100644 src/lax/test_utils.rs diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 646d9f7..71b4efc 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -85,3 +85,6 @@ pub use rewrite::*; pub mod optic; pub mod var; + +#[cfg(test)] +pub mod test_utils; diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index a2946a0..7aff9b4 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -311,7 +311,6 @@ mod tests { fn empty_map(target: usize) -> FiniteFunction { FiniteFunction::::new(VecArray(vec![]), target).unwrap() } - #[test] fn test_exploded_context_construction() { let (f_label, g_label, g, apex, left, right, left_map, right_map, candidate) = @@ -462,236 +461,104 @@ mod tests { #[test] fn test_rewrite_complements_working_example() { - let (f_label, 
g_label, g, apex, left, right, left_map, right_map, candidate) = - example_rewrite_input(); + let (_, _, g, apex, left, right, left_map, right_map, candidate) = example_rewrite_input(); let rule = Span::new(&apex, &left, &right, &left_map, &right_map); - let build_expected = |complement: Hypergraph, - q: &FiniteFunction, - k0: NodeId, - k1: NodeId| - -> Hypergraph { - let interface_in_complement = NodeEdgeMap { - nodes: FiniteFunction::::new(VecArray(vec![k0.0, k1.0]), q.source()) - .unwrap(), - edges: empty_map(complement.edges.len()), - }; - let interface_to_complement = NodeEdgeMap { - nodes: interface_in_complement - .nodes - .compose(q) - .expect("interface to complement map"), - edges: empty_map(complement.edges.len()), - }; - let span = Span::new( - rule.apex, - rule.right, - &complement, - rule.right_map, - &interface_to_complement, - ); - span.pushout() - }; - let mut expected = Vec::new(); - - let mut c1: Hypergraph = Hypergraph::empty(); - let w1 = c1.new_node("w".to_string()); - let w2 = c1.new_node("w".to_string()); - let w3 = c1.new_node("w".to_string()); - let w5 = c1.new_node("w".to_string()); - let a0 = c1.new_node("w".to_string()); - let a1 = c1.new_node("w".to_string()); - let k0 = c1.new_node("w".to_string()); - let k1 = c1.new_node("w".to_string()); - c1.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![w2], - }, - ); - c1.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![w2], - targets: vec![w3], - }, - ); - c1.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![a0], - }, - ); - c1.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![a1], - targets: vec![w5], - }, - ); - let (c1, q1) = c1.quotiented_with(vec![a1, a0], vec![k0, k1]); - expected.push(build_expected(c1, &q1, k0, k1)); - - let mut c2: Hypergraph = Hypergraph::empty(); - let w1 = c2.new_node("w".to_string()); - let w2 = c2.new_node("w".to_string()); - let w3 = c2.new_node("w".to_string()); - let w5 = c2.new_node("w".to_string()); - let a0 = c2.new_node("w".to_string()); - let a1 = c2.new_node("w".to_string()); - let k0 = c2.new_node("w".to_string()); - let k1 = c2.new_node("w".to_string()); - c2.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![w2], - }, - ); - c2.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![w2], - targets: vec![w3], - }, - ); - c2.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![a0], - }, - ); - c2.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![a1], - targets: vec![w5], - }, - ); - let (c2, q2) = c2.quotiented_with(vec![a1, a0], vec![k1, k0]); - expected.push(build_expected(c2, &q2, k0, k1)); - - let mut c3: Hypergraph = Hypergraph::empty(); - let w1 = c3.new_node("w".to_string()); - let w2 = c3.new_node("w".to_string()); - let w3 = c3.new_node("w".to_string()); - let w5 = c3.new_node("w".to_string()); - let a0 = c3.new_node("w".to_string()); - let a1 = c3.new_node("w".to_string()); - let k0 = c3.new_node("w".to_string()); - let k1 = c3.new_node("w".to_string()); - c3.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![w2], - }, - ); - c3.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![w2], - targets: vec![w3], - }, - ); - c3.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![a0], - }, - ); - c3.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![a1], - targets: vec![w5], - }, - ); - let (c3, q3) = c3.quotiented_with(vec![a0, a0], vec![a1, k1]); - 
expected.push(build_expected(c3, &q3, k0, k1)); - - let mut c4: Hypergraph = Hypergraph::empty(); - let w1 = c4.new_node("w".to_string()); - let w2 = c4.new_node("w".to_string()); - let w3 = c4.new_node("w".to_string()); - let w5 = c4.new_node("w".to_string()); - let a0 = c4.new_node("w".to_string()); - let a1 = c4.new_node("w".to_string()); - let k0 = c4.new_node("w".to_string()); - let k1 = c4.new_node("w".to_string()); - c4.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![w2], - }, - ); - c4.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![w2], - targets: vec![w3], - }, - ); - c4.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![a0], - }, - ); - c4.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![a1], - targets: vec![w5], - }, - ); - let (c4, q4) = c4.quotiented_with(vec![a0, a0], vec![a1, k0]); - expected.push(build_expected(c4, &q4, k0, k1)); - - let mut c5: Hypergraph = Hypergraph::empty(); - let w1 = c5.new_node("w".to_string()); - let w2 = c5.new_node("w".to_string()); - let w3 = c5.new_node("w".to_string()); - let w5 = c5.new_node("w".to_string()); - let a0 = c5.new_node("w".to_string()); - let a1 = c5.new_node("w".to_string()); - let k0 = c5.new_node("w".to_string()); - let k1 = c5.new_node("w".to_string()); - c5.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![w2], - }, - ); - c5.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![w2], - targets: vec![w3], - }, - ); - c5.new_edge( - f_label.clone(), - Hyperedge { - sources: vec![w1], - targets: vec![a0], - }, - ); - c5.new_edge( - g_label.clone(), - Hyperedge { - sources: vec![a1], - targets: vec![w5], - }, - ); - let (c5, q5) = c5.quotiented_with(vec![a0, a0, a0], vec![a1, k0, k1]); - expected.push(build_expected(c5, &q5, k0, k1)); + let expected = vec![ + crate::hg! { + nodes: { + a0a1k0k1: "w", + b2: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0a1k0k1], [b2]), + ("g", [b2], [a0a1k0k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k0k1]), + ("g", [a0a1k0k1], [w5]), + ], + }, + crate::hg! { + nodes: { + a0k0: "w", + b2: "w", + a1k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0k0], [b2]), + ("g", [b2], [a1k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0k0]), + ("g", [a1k1], [w5]), + ], + }, + crate::hg! { + nodes: { + a1k0: "w", + b2: "w", + a0k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a1k0], [b2]), + ("g", [b2], [a0k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0k1]), + ("g", [a1k0], [w5]), + ], + }, + crate::hg! { + nodes: { + k0: "w", + b2: "w", + a0a1k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [k0], [b2]), + ("g", [b2], [a0a1k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k1]), + ("g", [a0a1k1], [w5]), + ], + }, + crate::hg! 
{ + nodes: { + a0a1k0: "w", + b2: "w", + k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0a1k0], [b2]), + ("g", [b2], [k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k0]), + ("g", [a0a1k0], [w5]), + ], + }, + ]; let complements = rewrite(&g, &rule, &candidate); assert_eq!(complements.len(), 5); @@ -701,7 +568,7 @@ mod tests { .collect::>(); assert!( missing.is_empty(), - "Missing {} expected complement(s)", + "Missing {} expected complement(s).", missing.len() ); } diff --git a/src/lax/test_utils.rs b/src/lax/test_utils.rs new file mode 100644 index 0000000..3ee1b1a --- /dev/null +++ b/src/lax/test_utils.rs @@ -0,0 +1,69 @@ +use crate::lax::{Hyperedge, Hypergraph}; + +pub(crate) fn build_hypergraph<'a>( + node_types: Vec<&'a str>, + edges: Vec<(&'a str, Vec, Vec)>, +) -> Hypergraph { + let mut graph: Hypergraph = Hypergraph::empty(); + let node_ids = node_types + .into_iter() + .map(|label| graph.new_node(label.to_string())) + .collect::>(); + + for (label, sources, targets) in edges { + let sources = sources + .into_iter() + .map(|idx| { + *node_ids + .get(idx) + .unwrap_or_else(|| panic!("edge source index {} out of range", idx)) + }) + .collect::>(); + let targets = targets + .into_iter() + .map(|idx| { + *node_ids + .get(idx) + .unwrap_or_else(|| panic!("edge target index {} out of range", idx)) + }) + .collect::>(); + graph.new_edge(label.to_string(), Hyperedge { sources, targets }); + } + + graph +} + +#[macro_export] +macro_rules! hg { + ( + nodes: { $($node:ident : $node_ty:expr),* $(,)? }, + edges: [ + $( + ($edge_label:expr, [$($src:ident),* $(,)?], [$($tgt:ident),* $(,)?]) + ),* $(,)? + ] $(,)? + ) => {{ + // Node identifiers are only for readability/debugging; they do not persist in the graph. + // Future: consider adding logical node names to Hypergraph for improved diagnostics. 
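+        // Usage sketch, mirroring the invocations in the rewrite tests (the names `g`, `a`,
+        // `b` below are illustrative only, not taken from any existing fixture):
+        //
+        //     let g = crate::hg! {
+        //         nodes: { a: "w", b: "w" },
+        //         edges: [ ("f", [a], [b]) ],
+        //     };
+        //
+        // This expands to a call to `build_hypergraph` below: node names are mapped to
+        // indices in declaration order, and each edge's endpoint names are resolved
+        // through that index.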
+ let node_types = vec![$($node_ty),*]; + let node_names = vec![$(stringify!($node)),*]; + let mut node_index = std::collections::HashMap::new(); + for (idx, name) in node_names.iter().enumerate() { + node_index.insert(*name, idx); + } + let edges = vec![ + $( + ( + $edge_label, + vec![ + $( *node_index.get(stringify!($src)).expect("unknown source node") ),* + ], + vec![ + $( *node_index.get(stringify!($tgt)).expect("unknown target node") ),* + ], + ) + ),* + ]; + $crate::lax::test_utils::build_hypergraph(node_types, edges) + }}; +} From 8e0ffb790b393505b1d42fa138d50edce2a6e64e Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 09:47:42 +0100 Subject: [PATCH 37/41] assert expected structure --- src/lax/hypergraph.rs | 5 +++++ src/lax/rewrite.rs | 48 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 3643e46..4bd274a 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -568,6 +568,11 @@ impl Hypergraph { &self, excluded: &NodeEdgeMap, ) -> (Hypergraph, NodeEdgeMap) { + assert_eq!( + self.edges.len(), + self.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); let mut in_image_nodes = vec![false; self.nodes.len()]; for &idx in excluded.nodes.table.iter() { if idx >= self.nodes.len() { diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 7aff9b4..5ee050a 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -226,6 +226,54 @@ fn validate_candidate_map( g.edges.len() ); } + + for (edge_id, edge) in rule.left.adjacency.iter().enumerate() { + let host_edge_id = candidate.edges.table[edge_id]; + let host_edge = g.adjacency.get(host_edge_id).unwrap_or_else(|| { + panic!( + "candidate edge image out of bounds: got {}, max {}", + host_edge_id, + g.adjacency.len() + ) + }); + + if edge.sources.len() != host_edge.sources.len() + || edge.targets.len() != host_edge.targets.len() + { + panic!( + "candidate edge image arity mismatch for edge {}", + edge_id + ); + } + + for (idx, node) in edge.sources.iter().enumerate() { + let image = candidate.nodes.table[node.0]; + let host_node = host_edge.sources[idx].0; + if image != host_node { + panic!( + "candidate edge image source mismatch for edge {} at position {}: got {}, expected {}", + edge_id, + idx, + host_node, + image + ); + } + } + + for (idx, node) in edge.targets.iter().enumerate() { + let image = candidate.nodes.table[node.0]; + let host_node = host_edge.targets[idx].0; + if image != host_node { + panic!( + "candidate edge image target mismatch for edge {} at position {}: got {}, expected {}", + edge_id, + idx, + host_node, + image + ); + } + } + } } fn identification_condition(rule: &Span<'_, O, A>, candidate: &NodeEdgeMap) -> bool { From 6b6341113943608bcddf2e04bf1a7cae2f2c5886 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 10:10:47 +0100 Subject: [PATCH 38/41] label preserv invariant --- src/lax/rewrite.rs | 38 +++++++++++++++++++++++++++++++++----- tests/lax/rewrite.rs | 2 +- 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index 5ee050a..f55d36d 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -193,7 +193,7 @@ fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec( +fn validate_candidate_map( rule: &Span<'_, O, A>, g: &Hypergraph, candidate: &NodeEdgeMap, @@ -227,6 +227,20 @@ fn validate_candidate_map( ); } + for (node_id, node_label) in rule.left.nodes.iter().enumerate() { + let host_node_id = 
candidate.nodes.table[node_id]; + let host_label = g.nodes.get(host_node_id).unwrap_or_else(|| { + panic!( + "candidate node image out of bounds: got {}, max {}", + host_node_id, + g.nodes.len() + ) + }); + if host_label != node_label { + panic!("candidate node label mismatch for node {}", node_id); + } + } + for (edge_id, edge) in rule.left.adjacency.iter().enumerate() { let host_edge_id = candidate.edges.table[edge_id]; let host_edge = g.adjacency.get(host_edge_id).unwrap_or_else(|| { @@ -236,14 +250,28 @@ fn validate_candidate_map( g.adjacency.len() ) }); + let host_edge_label = g.edges.get(host_edge_id).unwrap_or_else(|| { + panic!( + "candidate edge image out of bounds: got {}, max {}", + host_edge_id, + g.edges.len() + ) + }); + let edge_label = rule.left.edges.get(edge_id).unwrap_or_else(|| { + panic!( + "left edge index out of bounds: got {}, max {}", + edge_id, + rule.left.edges.len() + ) + }); + if host_edge_label != edge_label { + panic!("candidate edge label mismatch for edge {}", edge_id); + } if edge.sources.len() != host_edge.sources.len() || edge.targets.len() != host_edge.targets.len() { - panic!( - "candidate edge image arity mismatch for edge {}", - edge_id - ); + panic!("candidate edge image arity mismatch for edge {}", edge_id); } for (idx, node) in edge.sources.iter().enumerate() { diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index a7c9727..709b5a4 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -27,7 +27,7 @@ fn test_rewrite_identification_fails() { // K = ∅, L = {a, b}. G has one node w and m(a) = m(b) = w. let mut l: Hypergraph = Hypergraph::empty(); l.new_node(1); - l.new_node(2); + l.new_node(1); let r: Hypergraph = Hypergraph::empty(); let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); From 44070359829824dc1818f4128df810dfb4013931 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 10:11:37 +0100 Subject: [PATCH 39/41] add invariant --- src/lax/rewrite.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index f55d36d..fb7c53b 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -226,6 +226,11 @@ fn validate_candidate_map( g.edges.len() ); } + assert_eq!( + rule.left.edges.len(), + rule.left.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); for (node_id, node_label) in rule.left.nodes.iter().enumerate() { let host_node_id = candidate.nodes.table[node_id]; From ce9524bfddc5b194183ed478550bcf94b6c811a6 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 13 Jan 2026 10:25:22 +0100 Subject: [PATCH 40/41] add invariant --- src/lax/rewrite.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs index fb7c53b..f0d7d40 100644 --- a/src/lax/rewrite.rs +++ b/src/lax/rewrite.rs @@ -226,6 +226,11 @@ fn validate_candidate_map( g.edges.len() ); } + assert_eq!( + g.edges.len(), + g.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); assert_eq!( rule.left.edges.len(), rule.left.adjacency.len(), From 489fcc9012d6b8a96f0063c393975a4c59d44f12 Mon Sep 17 00:00:00 2001 From: mstn Date: Thu, 5 Feb 2026 07:52:36 +0100 Subject: [PATCH 41/41] add test for left linear --- tests/lax/rewrite.rs | 80 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs index 709b5a4..ad25eeb 100644 --- a/tests/lax/rewrite.rs +++ b/tests/lax/rewrite.rs @@ -93,3 +93,83 @@ fn test_rewrite_gluing_ok() { let complements = rewrite(&g, 
&rule, &candidate); assert!(!complements.is_empty()); } + +#[test] +fn test_rewrite_left_linear_unique_complement() { + // Pattern L: three nodes with a single edge 100: l0 -> (l1, l2). + // Target G: four nodes with edge 100: g0 -> (g1, g2) matched by L, + // plus extra context edge 300: g0 -> g1. + // K -> L is injective (left-linear), and the match satisfies gluing, + // so there is a unique complement and thus a unique rewrite. + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node(1); + apex.new_node(1); + + let mut l: Hypergraph = Hypergraph::empty(); + let l0 = l.new_node(1); + let l1 = l.new_node(1); + let l2 = l.new_node(1); + l.new_edge( + 100, + Hyperedge { + sources: vec![l0], + targets: vec![l1, l2], + }, + ); + + let mut r: Hypergraph = Hypergraph::empty(); + let r0 = r.new_node(1); + let r1 = r.new_node(1); + let r2 = r.new_node(1); + r.new_edge( + 200, + Hyperedge { + sources: vec![r0, r2], + targets: vec![r1], + }, + ); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1]), l.nodes.len()).unwrap(), + edges: empty_map(l.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1]), r.nodes.len()).unwrap(), + edges: empty_map(r.edges.len()), + }; + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); + + let mut g: Hypergraph = Hypergraph::empty(); + let g0 = g.new_node(1); + let g1 = g.new_node(1); + let g2 = g.new_node(1); + g.new_node(1); + g.new_edge( + 100, + Hyperedge { + sources: vec![g0], + targets: vec![g1, g2], + }, + ); + g.new_edge( + 300, + Hyperedge { + sources: vec![g0], + targets: vec![g1], + }, + ); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1, 2]), g.nodes.len()).unwrap(), + edges: FiniteFunction::::new(VecArray(vec![0]), g.edges.len()).unwrap(), + }; + + let rewrites = rewrite(&g, &rule, &candidate); + assert_eq!(rewrites.len(), 1); + assert_eq!(rewrites[0].nodes.len(), 4); + assert_eq!(rewrites[0].edges.len(), 2); + assert_eq!(rewrites[0].adjacency.len(), 2); + assert!(rewrites[0].edges.contains(&200)); + assert!(rewrites[0].edges.contains(&300)); + assert!(!rewrites[0].edges.contains(&100)); +}