diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 3d709ba..4bd274a 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -1,5 +1,7 @@ use crate::array::vec::{VecArray, VecKind}; +use crate::category::Coproduct; use crate::finite_function::*; +use crate::lax::Arrow; use core::fmt::Debug; @@ -18,6 +20,156 @@ pub struct Hyperedge { pub targets: Vec, } +/// A map on nodes and edges without structure preservation guarantees. +#[derive(Clone)] +pub struct NodeEdgeMap { + pub nodes: FiniteFunction, + pub edges: FiniteFunction, +} + +impl NodeEdgeMap { + pub fn compose(&self, other: &NodeEdgeMap) -> Self { + NodeEdgeMap { + nodes: self.nodes.compose(&other.nodes).expect("node map compose"), + edges: self.edges.compose(&other.edges).expect("edge map compose"), + } + } + + pub fn coproduct(&self, other: &NodeEdgeMap) -> Self { + NodeEdgeMap { + nodes: self + .nodes + .coproduct(&other.nodes) + .expect("node map coproduct"), + edges: self + .edges + .coproduct(&other.edges) + .expect("edge map coproduct"), + } + } +} + +#[derive(Clone, Copy)] +pub struct Span<'a, O, A> { + pub apex: &'a Hypergraph, + pub left: &'a Hypergraph, + pub right: &'a Hypergraph, + pub left_map: &'a NodeEdgeMap, + pub right_map: &'a NodeEdgeMap, +} + +impl<'a, O, A> Span<'a, O, A> { + /// Construct a lax span and validate its structural properties. + pub fn new( + apex: &'a Hypergraph, + left: &'a Hypergraph, + right: &'a Hypergraph, + left_map: &'a NodeEdgeMap, + right_map: &'a NodeEdgeMap, + ) -> Self { + Span { + apex, + left, + right, + left_map, + right_map, + } + .validate() + } + + /// Validate that maps are compatible with their hypergraphs. 
+ pub fn validate(self) -> Self { + if !self.apex.edges.is_empty() { + panic!( + "apex must be discrete (no edges), got {} edge(s)", + self.apex.edges.len() + ); + } + if self.left_map.nodes.source() != self.apex.nodes.len() { + panic!( + "left map node source size mismatch: got {}, expected {}", + self.left_map.nodes.source(), + self.apex.nodes.len() + ); + } + if self.left_map.nodes.target() != self.left.nodes.len() { + panic!( + "left map node target size mismatch: got {}, expected {}", + self.left_map.nodes.target(), + self.left.nodes.len() + ); + } + if self.left_map.edges.source() != self.apex.edges.len() { + panic!( + "left map edge source size mismatch: got {}, expected {}", + self.left_map.edges.source(), + self.apex.edges.len() + ); + } + if self.left_map.edges.target() != self.left.edges.len() { + panic!( + "left map edge target size mismatch: got {}, expected {}", + self.left_map.edges.target(), + self.left.edges.len() + ); + } + if self.right_map.nodes.source() != self.apex.nodes.len() { + panic!( + "right map node source size mismatch: got {}, expected {}", + self.right_map.nodes.source(), + self.apex.nodes.len() + ); + } + if self.right_map.nodes.target() != self.right.nodes.len() { + panic!( + "right map node target size mismatch: got {}, expected {}", + self.right_map.nodes.target(), + self.right.nodes.len() + ); + } + if self.right_map.edges.source() != self.apex.edges.len() { + panic!( + "right map edge source size mismatch: got {}, expected {}", + self.right_map.edges.source(), + self.apex.edges.len() + ); + } + if self.right_map.edges.target() != self.right.edges.len() { + panic!( + "right map edge target size mismatch: got {}, expected {}", + self.right_map.edges.target(), + self.right.edges.len() + ); + } + + self + } + + /// Compute the pushout of the span, identifying only nodes. + /// + /// NOTE: this assumes the apex is discrete (no edges), so edge identifications are ignored. 
+ pub fn pushout(&self) -> Hypergraph + where + O: Clone + PartialEq, + A: Clone + PartialEq, + { + debug_assert!( + self.apex.edges.is_empty(), + "pushout assumes discrete apex (no edge identifications)" + ); + + let mut pushout = self.left.coproduct(self.right); + let left_nodes = self.left.nodes.len(); + for (k_idx, &l_idx) in self.left_map.nodes.table.iter().enumerate() { + let r_idx = self.right_map.nodes.table[k_idx] + left_nodes; + pushout.quotient.0.push(NodeId(l_idx)); + pushout.quotient.1.push(NodeId(r_idx)); + } + pushout.quotient(); + pushout + } +} + /// Create a [`Hyperedge`] from a tuple of `(sources, targets)`. /// /// This allows convenient creation of hyperedges from various collection types: @@ -337,6 +489,17 @@ impl Hypergraph { } impl Hypergraph { + /// Return a quotiented copy along with the coequalizer used. + pub fn quotiented_with( + mut self, + quotient_left: Vec, + quotient_right: Vec, + ) -> (Self, FiniteFunction) { + self.quotient = (quotient_left, quotient_right); + let q = self.quotient(); + (self, q) + } + /// Construct a [`Hypergraph`] by identifying nodes in the quotient map. /// Mutably quotient this [`Hypergraph`], returning the coequalizer calculated from `self.quotient`. 
/// @@ -401,6 +564,119 @@ pub(crate) fn concat(v1: &[T], v2: &[T]) -> Vec { } impl Hypergraph { + pub fn remainder_with_injection( + &self, + excluded: &NodeEdgeMap, + ) -> (Hypergraph, NodeEdgeMap) { + assert_eq!( + self.edges.len(), + self.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); + let mut in_image_nodes = vec![false; self.nodes.len()]; + for &idx in excluded.nodes.table.iter() { + if idx >= self.nodes.len() { + panic!( + "excluded node index out of range: got {}, max {}", + idx, + self.nodes.len() + ); + } + in_image_nodes[idx] = true; + } + + let mut in_image_edges = vec![false; self.edges.len()]; + for &idx in excluded.edges.table.iter() { + if idx >= self.edges.len() { + panic!( + "excluded edge index out of range: got {}, max {}", + idx, + self.edges.len() + ); + } + in_image_edges[idx] = true; + } + + let mut remainder = Hypergraph::empty(); + let mut remainder_node_to_host = Vec::new(); + let mut node_map: Vec> = vec![None; self.nodes.len()]; + for (idx, label) in self.nodes.iter().enumerate() { + if in_image_nodes[idx] { + continue; + } + let new_id = remainder.new_node(label.clone()); + remainder_node_to_host.push(idx); + node_map[idx] = Some(new_id.0); + } + + let mut remainder_edge_to_host = Vec::new(); + for (edge_id, edge) in self.adjacency.iter().enumerate() { + if in_image_edges[edge_id] { + continue; + } + + let mut sources = Vec::with_capacity(edge.sources.len()); + for node in &edge.sources { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => { + let new_id = remainder.new_node(self.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); + new_id + } + }; + sources.push(new_id); + } + + let mut targets = Vec::with_capacity(edge.targets.len()); + for node in &edge.targets { + let new_id = match node_map[node.0] { + Some(existing) => NodeId(existing), + None => { + let new_id = remainder.new_node(self.nodes[node.0].clone()); + remainder_node_to_host.push(node.0); 
+ new_id + } + }; + targets.push(new_id); + } + + remainder.new_edge(self.edges[edge_id].clone(), Hyperedge { sources, targets }); + remainder_edge_to_host.push(edge_id); + } + + let remainder_in_host = NodeEdgeMap { + nodes: FiniteFunction::::new( + VecArray(remainder_node_to_host), + self.nodes.len(), + ) + .expect("remainder node injection"), + edges: FiniteFunction::::new( + VecArray(remainder_edge_to_host), + self.edges.len(), + ) + .expect("remainder edge injection"), + }; + + (remainder, remainder_in_host) + } + + pub(crate) fn coproduct_with_injections( + &self, + other: &Hypergraph, + ) -> (Hypergraph, NodeEdgeMap, NodeEdgeMap) { + let coproduct = self.coproduct(other); + let left = NodeEdgeMap { + nodes: FiniteFunction::::identity(self.nodes.len()).inject0(other.nodes.len()), + edges: FiniteFunction::::identity(self.edges.len()).inject0(other.edges.len()), + }; + let right = NodeEdgeMap { + nodes: FiniteFunction::::identity(other.nodes.len()).inject1(self.nodes.len()), + edges: FiniteFunction::::identity(other.edges.len()).inject1(self.edges.len()), + }; + (coproduct, left, right) + } + pub(crate) fn coproduct(&self, other: &Hypergraph) -> Hypergraph { let n = self.nodes.len(); diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 4cf746c..71b4efc 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -76,10 +76,15 @@ pub mod functor; pub mod hypergraph; pub mod mut_category; pub mod open_hypergraph; +pub mod rewrite; pub use crate::category::*; pub use hypergraph::*; pub use open_hypergraph::*; +pub use rewrite::*; pub mod optic; pub mod var; + +#[cfg(test)] +pub mod test_utils; diff --git a/src/lax/rewrite.rs b/src/lax/rewrite.rs new file mode 100644 index 0000000..f0d7d40 --- /dev/null +++ b/src/lax/rewrite.rs @@ -0,0 +1,868 @@ +use crate::array::vec::VecKind; +use crate::finite_function::FiniteFunction; +use crate::lax::{Arrow, Coproduct as _, Hypergraph, NodeEdgeMap, NodeId, Span}; +use crate::partition::{enumerate_partitions, Partition, PartitionInput}; + 
+struct ExplodedContext { + remainder_plus_interface: Hypergraph, + to_host: NodeEdgeMap, + to_remainder_plus_redex: NodeEdgeMap, + interface_in_exploded: NodeEdgeMap, +} + +impl ExplodedContext { + fn interface_in_exploded(&self) -> &NodeEdgeMap { + &self.interface_in_exploded + } +} + +/// Rewrite a lax hypergraph using a rule span and candidate map. +pub fn rewrite( + g: &Hypergraph, + rule: &Span<'_, O, A>, + candidate: &NodeEdgeMap, +) -> Vec> { + validate_candidate_map(rule, g, candidate); + if !identification_condition(rule, candidate) || !dangling_condition(rule, candidate, g) { + return Vec::new(); + } + let exploded = exploded_context(g, rule, candidate); + let fiber_inputs = fiber_partition_inputs(&exploded); + let partitions_per_fiber: Vec>> = + fiber_inputs.iter().map(enumerate_partitions).collect(); + + if partitions_per_fiber.is_empty() { + return vec![pushout_result(&exploded, rule, &partitions_per_fiber, &[])]; + } + + let mut complements = Vec::new(); + let mut selection: Vec = Vec::with_capacity(partitions_per_fiber.len()); + + fn pushout_result( + exploded: &ExplodedContext, + rule: &Span<'_, O, A>, + partitions_per_fiber: &[Vec>], + selection: &[usize], + ) -> Hypergraph { + let mut quotient_left = Vec::new(); + let mut quotient_right = Vec::new(); + + for (fiber_idx, &partition_idx) in selection.iter().enumerate() { + let partition = &partitions_per_fiber[fiber_idx][partition_idx]; + for block in &partition.blocks { + let Some((first, rest)) = block.elements.split_first() else { + continue; + }; + for node in rest { + quotient_left.push(*first); + quotient_right.push(*node); + } + } + } + + let (complement, q) = exploded + .remainder_plus_interface + .clone() + .quotiented_with(quotient_left, quotient_right); + + let interface_to_complement = { + let nodes = exploded + .interface_in_exploded() + .nodes + .compose(&q) + .expect("interface to complement map"); + NodeEdgeMap { + nodes, + edges: FiniteFunction::::initial(complement.edges.len()), + 
} + }; + + let span = Span::new( + rule.apex, + rule.right, + &complement, + rule.right_map, + &interface_to_complement, + ); + span.pushout() + } + + fn walk( + idx: usize, + exploded: &ExplodedContext, + rule: &Span<'_, O, A>, + partitions_per_fiber: &[Vec>], + selection: &mut Vec, + results: &mut Vec>, + ) { + if idx == partitions_per_fiber.len() { + results.push(pushout_result( + exploded, + rule, + partitions_per_fiber, + selection, + )); + return; + } + + for partition_idx in 0..partitions_per_fiber[idx].len() { + selection.push(partition_idx); + walk( + idx + 1, + exploded, + rule, + partitions_per_fiber, + selection, + results, + ); + selection.pop(); + } + } + + walk( + 0, + &exploded, + rule, + &partitions_per_fiber, + &mut selection, + &mut complements, + ); + + complements +} + +fn exploded_context( + host: &Hypergraph, + rule: &Span<'_, O, A>, + matching: &NodeEdgeMap, +) -> ExplodedContext { + let (remainder, remainder_in_host) = host.remainder_with_injection(matching); + let q_interface = rule.left_map.compose(matching); + let to_host = remainder_in_host.coproduct(&q_interface); + + let (_remainder_plus_redex, remainder_in_remainder_plus_redex, redex_in_remainder_plus_redex) = + remainder.coproduct_with_injections(rule.left); + let to_remainder_plus_redex = remainder_in_remainder_plus_redex + .coproduct(&rule.left_map.compose(&redex_in_remainder_plus_redex)); + + let (remainder_plus_interface, _remainder_in_exploded, interface_in_exploded) = + remainder.coproduct_with_injections(rule.apex); + + ExplodedContext { + remainder_plus_interface, + to_host, + to_remainder_plus_redex, + interface_in_exploded, + } +} + +fn fiber_partition_inputs(exploded: &ExplodedContext) -> Vec> { + let mut fibers = vec![Vec::new(); exploded.to_host.nodes.target()]; + for (src, &tgt) in exploded.to_host.nodes.table.iter().enumerate() { + fibers[tgt].push(NodeId(src)); + } + + fibers + .into_iter() + .filter(|nodes| !nodes.is_empty()) + .map(|nodes| { + // f' refines q, so 
f'-classes are contained within each q-fiber. + let mut class_index = vec![None; exploded.to_remainder_plus_redex.nodes.target()]; + let mut class_ids = Vec::with_capacity(nodes.len()); + let mut next_class = 0; + for node in &nodes { + let f_image = exploded.to_remainder_plus_redex.nodes.table[node.0]; + let id = match class_index[f_image] { + Some(existing) => existing, + None => { + let id = next_class; + next_class += 1; + class_index[f_image] = Some(id); + id + } + }; + class_ids.push(id); + } + + PartitionInput { + elements: nodes, + class_ids, + class_count: next_class, + } + }) + .collect() +} + +fn validate_candidate_map( + rule: &Span<'_, O, A>, + g: &Hypergraph, + candidate: &NodeEdgeMap, +) { + if candidate.nodes.source() != rule.left.nodes.len() { + panic!( + "candidate map node source size mismatch: got {}, expected {}", + candidate.nodes.source(), + rule.left.nodes.len() + ); + } + if candidate.nodes.target() != g.nodes.len() { + panic!( + "candidate map node target size mismatch: got {}, expected {}", + candidate.nodes.target(), + g.nodes.len() + ); + } + if candidate.edges.source() != rule.left.edges.len() { + panic!( + "candidate map edge source size mismatch: got {}, expected {}", + candidate.edges.source(), + rule.left.edges.len() + ); + } + if candidate.edges.target() != g.edges.len() { + panic!( + "candidate map edge target size mismatch: got {}, expected {}", + candidate.edges.target(), + g.edges.len() + ); + } + assert_eq!( + g.edges.len(), + g.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); + assert_eq!( + rule.left.edges.len(), + rule.left.adjacency.len(), + "malformed hypergraph: edges and adjacency lengths differ" + ); + + for (node_id, node_label) in rule.left.nodes.iter().enumerate() { + let host_node_id = candidate.nodes.table[node_id]; + let host_label = g.nodes.get(host_node_id).unwrap_or_else(|| { + panic!( + "candidate node image out of bounds: got {}, max {}", + host_node_id, + g.nodes.len() + ) 
+ }); + if host_label != node_label { + panic!("candidate node label mismatch for node {}", node_id); + } + } + + for (edge_id, edge) in rule.left.adjacency.iter().enumerate() { + let host_edge_id = candidate.edges.table[edge_id]; + let host_edge = g.adjacency.get(host_edge_id).unwrap_or_else(|| { + panic!( + "candidate edge image out of bounds: got {}, max {}", + host_edge_id, + g.adjacency.len() + ) + }); + let host_edge_label = g.edges.get(host_edge_id).unwrap_or_else(|| { + panic!( + "candidate edge image out of bounds: got {}, max {}", + host_edge_id, + g.edges.len() + ) + }); + let edge_label = rule.left.edges.get(edge_id).unwrap_or_else(|| { + panic!( + "left edge index out of bounds: got {}, max {}", + edge_id, + rule.left.edges.len() + ) + }); + if host_edge_label != edge_label { + panic!("candidate edge label mismatch for edge {}", edge_id); + } + + if edge.sources.len() != host_edge.sources.len() + || edge.targets.len() != host_edge.targets.len() + { + panic!("candidate edge image arity mismatch for edge {}", edge_id); + } + + for (idx, node) in edge.sources.iter().enumerate() { + let image = candidate.nodes.table[node.0]; + let host_node = host_edge.sources[idx].0; + if image != host_node { + panic!( + "candidate edge image source mismatch for edge {} at position {}: got {}, expected {}", + edge_id, + idx, + host_node, + image + ); + } + } + + for (idx, node) in edge.targets.iter().enumerate() { + let image = candidate.nodes.table[node.0]; + let host_node = host_edge.targets[idx].0; + if image != host_node { + panic!( + "candidate edge image target mismatch for edge {} at position {}: got {}, expected {}", + edge_id, + idx, + host_node, + image + ); + } + } + } +} + +fn identification_condition(rule: &Span<'_, O, A>, candidate: &NodeEdgeMap) -> bool { + let mut in_image = vec![false; rule.left.nodes.len()]; + for i in 0..rule.left_map.nodes.source() { + let idx = rule.left_map.nodes.table[i]; + in_image[idx] = true; + } + + let mut seen = vec![None; 
candidate.nodes.target()]; + for i in 0..rule.left.nodes.len() { + if in_image[i] { + continue; + } + let img = candidate.nodes.table[i]; + if let Some(existing) = seen[img] { + if existing != i { + return false; + } + } else { + seen[img] = Some(i); + } + } + + true +} + +fn dangling_condition( + rule: &Span<'_, O, A>, + candidate: &NodeEdgeMap, + g: &Hypergraph, +) -> bool { + let mut in_l_image = vec![false; rule.left.nodes.len()]; + for i in 0..rule.left_map.nodes.source() { + let idx = rule.left_map.nodes.table[i]; + in_l_image[idx] = true; + } + + let mut forbidden_nodes = vec![false; g.nodes.len()]; + for i in 0..rule.left.nodes.len() { + if in_l_image[i] { + continue; + } + let img = candidate.nodes.table[i]; + forbidden_nodes[img] = true; + } + + let mut edge_in_image = vec![false; g.edges.len()]; + for i in 0..candidate.edges.source() { + let idx = candidate.edges.table[i]; + edge_in_image[idx] = true; + } + + for (edge_id, edge) in g.adjacency.iter().enumerate() { + if edge_in_image[edge_id] { + continue; + } + let touches_forbidden = edge + .sources + .iter() + .chain(edge.targets.iter()) + .any(|n| forbidden_nodes[n.0]); + if touches_forbidden { + return false; + } + } + + true +} + +#[cfg(test)] +mod tests { + // Examples from + // 1. Bonchi, Filippo, et al. "String diagram rewrite theory I: Rewriting with Frobenius structure." Journal of the ACM (JACM) 69.2 (2022): 1-58. + // 2. Heumüller, Marvin, et al. "Construction of pushout complements in the category of hypergraphs." Electronic Communications of the EASST 39 (2011). 
+ use super::{exploded_context, fiber_partition_inputs, rewrite}; + use crate::array::vec::{VecArray, VecKind}; + use crate::finite_function::FiniteFunction; + use crate::lax::{Arrow as _, Hyperedge, Hypergraph, NodeEdgeMap, NodeId, Span}; + use crate::partition::{enumerate_partitions, Partition}; + use std::collections::HashMap; + + fn empty_map(target: usize) -> FiniteFunction { + FiniteFunction::::new(VecArray(vec![]), target).unwrap() + } + #[test] + fn test_exploded_context_construction() { + let (f_label, g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let exploded = exploded_context(&g, &rule, &candidate); + + let mut expected: Hypergraph = Hypergraph::empty(); + let e_w1 = expected.new_node("w".to_string()); + let e_w2 = expected.new_node("w".to_string()); + let e_w3 = expected.new_node("w".to_string()); + let e_w5 = expected.new_node("w".to_string()); + let e_w4a = expected.new_node("w".to_string()); + let e_w4b = expected.new_node("w".to_string()); + + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w2], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w2], + targets: vec![e_w3], + }, + ); + expected.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![e_w1], + targets: vec![e_w4a], + }, + ); + expected.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![e_w4b], + targets: vec![e_w5], + }, + ); + + expected.new_node("w".to_string()); + expected.new_node("w".to_string()); + + assert_eq!(exploded.remainder_plus_interface, expected); + } + + #[test] + fn test_f_prime_refines_q() { + let (_f_label, _g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let exploded = exploded_context(&g, &rule, &candidate); + + let mut f_prime_to_q = vec![None; 
exploded.to_remainder_plus_redex.nodes.target()]; + for (src, &f_prime_image) in exploded + .to_remainder_plus_redex + .nodes + .table + .iter() + .enumerate() + { + let q_image = exploded.to_host.nodes.table[src]; + match f_prime_to_q[f_prime_image] { + Some(existing) => assert_eq!(existing, q_image), + None => f_prime_to_q[f_prime_image] = Some(q_image), + } + } + } + + #[test] + fn test_fiber_partitions_expected_blocks() { + let (_f_label, _g_label, g, apex, left, right, left_map, right_map, candidate) = + example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let exploded = exploded_context(&g, &rule, &candidate); + let fibers = fiber_partition_inputs(&exploded); + + let copied_nodes = exploded.remainder_plus_interface.nodes.len() - rule.apex.nodes.len(); + let target_fiber = fibers + .iter() + .find(|fiber| { + let apex_count = fiber + .elements + .iter() + .filter(|node| node.0 >= copied_nodes) + .count(); + apex_count == rule.apex.nodes.len() + }) + .expect("expected fiber containing apex nodes"); + + let mut apex_nodes = target_fiber + .elements + .iter() + .cloned() + .filter(|node| node.0 >= copied_nodes) + .collect::>(); + apex_nodes.sort_by_key(|node| node.0); + + let mut w4_nodes = target_fiber + .elements + .iter() + .cloned() + .filter(|node| node.0 < copied_nodes) + .collect::>(); + w4_nodes.sort_by_key(|node| node.0); + + let mut name_map: HashMap = HashMap::new(); + name_map.insert(w4_nodes[0], "a0"); + name_map.insert(w4_nodes[1], "a1"); + name_map.insert(apex_nodes[0], "k0"); + name_map.insert(apex_nodes[1], "k1"); + + let partitions = enumerate_partitions(target_fiber); + let mut actual = partitions + .iter() + .map(|partition| normalize_partition(partition, &name_map)) + .collect::>(); + actual.sort(); + + let mut expected = vec![ + vec![vec!["a0", "k0"], vec!["a1", "k1"]], + vec![vec!["a0", "k1"], vec!["a1", "k0"]], + vec![vec!["k0"], vec!["a0", "a1", "k1"]], + vec![vec!["k1"], vec!["a0", "a1", "k0"]], 
+ vec![vec!["a0", "a1", "k0", "k1"]], + ] + .into_iter() + .map(|blocks| { + let mut normalized = blocks + .into_iter() + .map(|mut block| { + block.sort(); + block + }) + .collect::>(); + normalized.sort(); + normalized + }) + .collect::>(); + expected.sort(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_rewrite_complements_working_example() { + let (_, _, g, apex, left, right, left_map, right_map, candidate) = example_rewrite_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + let expected = vec![ + crate::hg! { + nodes: { + a0a1k0k1: "w", + b2: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0a1k0k1], [b2]), + ("g", [b2], [a0a1k0k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k0k1]), + ("g", [a0a1k0k1], [w5]), + ], + }, + crate::hg! { + nodes: { + a0k0: "w", + b2: "w", + a1k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0k0], [b2]), + ("g", [b2], [a1k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0k0]), + ("g", [a1k1], [w5]), + ], + }, + crate::hg! { + nodes: { + a1k0: "w", + b2: "w", + a0k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a1k0], [b2]), + ("g", [b2], [a0k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0k1]), + ("g", [a1k0], [w5]), + ], + }, + crate::hg! { + nodes: { + k0: "w", + b2: "w", + a0a1k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [k0], [b2]), + ("g", [b2], [a0a1k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k1]), + ("g", [a0a1k1], [w5]), + ], + }, + crate::hg! 
{ + nodes: { + a0a1k0: "w", + b2: "w", + k1: "w", + w1: "w", + w2: "w", + w3: "w", + w5: "w", + }, + edges: [ + ("f", [a0a1k0], [b2]), + ("g", [b2], [k1]), + ("f", [w1], [w2]), + ("g", [w2], [w3]), + ("f", [w1], [a0a1k0]), + ("g", [a0a1k0], [w5]), + ], + }, + ]; + + let complements = rewrite(&g, &rule, &candidate); + assert_eq!(complements.len(), 5); + let missing = expected + .iter() + .filter(|expected_graph| !complements.iter().any(|h| h == *expected_graph)) + .collect::>(); + assert!( + missing.is_empty(), + "Missing {} expected complement(s).", + missing.len() + ); + } + + #[test] + fn test_rewrite_pushout_complement_split_match_node() { + // [2] Example 1 + let (host, apex, left, right, left_map, right_map, candidate) = + example_rewrite_pushout_complement_split_match_node_input(); + let rule = Span::new(&apex, &left, &right, &left_map, &right_map); + + let complements = rewrite(&host, &rule, &candidate); + assert!(complements.len() == 61, "Found {}", complements.len()); + + let mut has_split_loop = false; + for complement in &complements { + assert_eq!(complement.edges.len(), 2); + let f_idx = complement + .edges + .iter() + .position(|label| label == "f") + .expect("f edge exists"); + let f_edge = &complement.adjacency[f_idx]; + assert_eq!(f_edge.sources.len(), 1); + assert_eq!(f_edge.targets.len(), 1); + if f_edge.sources[0] != f_edge.targets[0] { + has_split_loop = true; + } + } + + assert!(has_split_loop); + } + + fn normalize_partition( + partition: &Partition, + name_map: &HashMap, + ) -> Vec> { + let mut blocks = partition + .blocks + .iter() + .map(|block| { + let mut names = block + .elements + .iter() + .map(|node| *name_map.get(node).expect("name map")) + .collect::>(); + names.sort(); + names + }) + .collect::>(); + blocks.sort(); + blocks + } + + fn example_rewrite_input() -> ( + String, + String, + Hypergraph, + Hypergraph, + Hypergraph, + Hypergraph, + NodeEdgeMap, + NodeEdgeMap, + NodeEdgeMap, + ) { + // [1] Session 4.5 Pushout Complements 
and Rewriting Modulo Frobenius + let f_label = "f".to_string(); + let g_label = "g".to_string(); + + let mut g: Hypergraph = Hypergraph::empty(); + let w1 = g.new_node("w".to_string()); + let w2 = g.new_node("w".to_string()); + let w3 = g.new_node("w".to_string()); + let w4 = g.new_node("w".to_string()); + let w5 = g.new_node("w".to_string()); + + g.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w2], + }, + ); + g.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w2], + targets: vec![w3], + }, + ); + g.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![w1], + targets: vec![w4], + }, + ); + g.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![w4], + targets: vec![w5], + }, + ); + + let mut left: Hypergraph = Hypergraph::empty(); + left.new_node("w".to_string()); + + let mut right: Hypergraph = Hypergraph::empty(); + let b0 = right.new_node("w".to_string()); + let b2 = right.new_node("w".to_string()); + let b1 = right.new_node("w".to_string()); + right.new_edge( + f_label.clone(), + Hyperedge { + sources: vec![b0], + targets: vec![b2], + }, + ); + right.new_edge( + g_label.clone(), + Hyperedge { + sources: vec![b2], + targets: vec![b1], + }, + ); + + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0]), left.nodes.len()).unwrap(), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 2]), right.nodes.len()).unwrap(), + edges: empty_map(right.edges.len()), + }; + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![3]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + ( + f_label, g_label, g, apex, left, right, left_map, right_map, candidate, + ) + } + + fn example_rewrite_pushout_complement_split_match_node_input() -> ( + Hypergraph, + Hypergraph, + 
Hypergraph, + Hypergraph, + NodeEdgeMap, + NodeEdgeMap, + NodeEdgeMap, + ) { + // [2] Example 1 + let mut host: Hypergraph = Hypergraph::empty(); + let f_node = host.new_node("w".to_string()); + let u_node = host.new_node("w".to_string()); + host.new_edge( + "e".to_string(), + Hyperedge { + sources: vec![u_node], + targets: vec![u_node], + }, + ); + host.new_edge( + "f".to_string(), + Hyperedge { + sources: vec![f_node], + targets: vec![f_node], + }, + ); + + let mut apex: Hypergraph = Hypergraph::empty(); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + apex.new_node("w".to_string()); + + let mut left: Hypergraph = Hypergraph::empty(); + left.new_node("w".to_string()); + left.new_node("w".to_string()); + + let right = apex.clone(); + + let left_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0, 1, 1]), left.nodes.len()) + .unwrap(), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 1, 2, 3]), right.nodes.len()) + .unwrap(), + edges: empty_map(right.edges.len()), + }; + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new( + VecArray(vec![f_node.0, f_node.0]), + host.nodes.len(), + ) + .unwrap(), + edges: empty_map(host.edges.len()), + }; + + (host, apex, left, right, left_map, right_map, candidate) + } +} diff --git a/src/lax/test_utils.rs b/src/lax/test_utils.rs new file mode 100644 index 0000000..3ee1b1a --- /dev/null +++ b/src/lax/test_utils.rs @@ -0,0 +1,69 @@ +use crate::lax::{Hyperedge, Hypergraph}; + +pub(crate) fn build_hypergraph<'a>( + node_types: Vec<&'a str>, + edges: Vec<(&'a str, Vec, Vec)>, +) -> Hypergraph { + let mut graph: Hypergraph = Hypergraph::empty(); + let node_ids = node_types + .into_iter() + .map(|label| graph.new_node(label.to_string())) + .collect::>(); + + for (label, sources, targets) in edges { + let sources = sources + .into_iter() + .map(|idx| { + *node_ids + .get(idx) + 
.unwrap_or_else(|| panic!("edge source index {} out of range", idx)) + }) + .collect::>(); + let targets = targets + .into_iter() + .map(|idx| { + *node_ids + .get(idx) + .unwrap_or_else(|| panic!("edge target index {} out of range", idx)) + }) + .collect::>(); + graph.new_edge(label.to_string(), Hyperedge { sources, targets }); + } + + graph +} + +#[macro_export] +macro_rules! hg { + ( + nodes: { $($node:ident : $node_ty:expr),* $(,)? }, + edges: [ + $( + ($edge_label:expr, [$($src:ident),* $(,)?], [$($tgt:ident),* $(,)?]) + ),* $(,)? + ] $(,)? + ) => {{ + // Node identifiers are only for readability/debugging; they do not persist in the graph. + // Future: consider adding logical node names to Hypergraph for improved diagnostics. + let node_types = vec![$($node_ty),*]; + let node_names = vec![$(stringify!($node)),*]; + let mut node_index = std::collections::HashMap::new(); + for (idx, name) in node_names.iter().enumerate() { + node_index.insert(*name, idx); + } + let edges = vec![ + $( + ( + $edge_label, + vec![ + $( *node_index.get(stringify!($src)).expect("unknown source node") ),* + ], + vec![ + $( *node_index.get(stringify!($tgt)).expect("unknown target node") ),* + ], + ) + ),* + ]; + $crate::lax::test_utils::build_hypergraph(node_types, edges) + }}; +} diff --git a/src/lib.rs b/src/lib.rs index 876911f..76878e6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -259,7 +259,9 @@ pub mod category; pub mod finite_function; pub mod indexed_coproduct; pub mod operations; +pub mod partition; pub mod semifinite; +pub mod union_find; // Strict open hypergraphs pub mod strict; diff --git a/src/partition.rs b/src/partition.rs new file mode 100644 index 0000000..4b8204d --- /dev/null +++ b/src/partition.rs @@ -0,0 +1,176 @@ +use crate::union_find::UnionFind; + +pub struct PartitionInput { + pub elements: Vec, + // class_ids identify the class for each element. 
+ // We search for partitions whose induced relation, together with the class relation, + // generates the total relation (i.e. everything becomes connected). + // We keep per-block class lists so we only union each class once per block, + // avoiding redundant unions when multiple elements in the same class land together. + pub class_ids: Vec, + pub class_count: usize, +} + +pub struct Partition { + pub blocks: Vec>, +} + +pub struct PartitionBlock { + pub elements: Vec, +} + +struct BlockState { + elements: Vec, + classes: Vec, +} + +pub fn enumerate_partitions(input: &PartitionInput) -> Vec> { + let mut results = Vec::new(); + let mut blocks: Vec> = Vec::new(); + let mut uf = UnionFind::new(input.class_count); + + fn all_connected(uf: &UnionFind) -> bool { + uf.components() == 1 + } + + fn walk( + idx: usize, + input: &PartitionInput, + blocks: &mut Vec>, + uf: &mut UnionFind, + results: &mut Vec>, + ) { + if idx == input.elements.len() { + if all_connected(uf) { + let blocks = blocks + .iter() + .map(|b| PartitionBlock { + elements: b.elements.clone(), + }) + .collect(); + results.push(Partition { blocks }); + } + return; + } + + let element = input.elements[idx].clone(); + let class_id = input.class_ids[idx]; + + for i in 0..blocks.len() { + let snap = uf.snapshot(); + let (elements_len, classes_len) = { + let block = &mut blocks[i]; + let elements_len = block.elements.len(); + let classes_len = block.classes.len(); + + block.elements.push(element.clone()); + if !block.classes.contains(&class_id) { + if let Some(&rep) = block.classes.first() { + uf.union(rep, class_id); + } + block.classes.push(class_id); + } + + (elements_len, classes_len) + }; + + walk(idx + 1, input, blocks, uf, results); + + uf.rollback(snap); + let block = &mut blocks[i]; + block.elements.truncate(elements_len); + block.classes.truncate(classes_len); + } + + blocks.push(BlockState { + elements: vec![element], + classes: vec![class_id], + }); + walk(idx + 1, input, blocks, uf, results); + 
blocks.pop(); + } + + walk(0, input, &mut blocks, &mut uf, &mut results); + results +} + +#[cfg(test)] +mod tests { + use super::{enumerate_partitions, PartitionInput}; + + fn normalize(partition: &super::Partition) -> Vec> { + let mut blocks = partition + .blocks + .iter() + .map(|block| { + let mut elems = block.elements.clone(); + elems.sort(); + elems + }) + .collect::>(); + blocks.sort(); + blocks + } + + #[test] + fn partitions_single_class_allows_all_partitions() { + let input = PartitionInput { + elements: vec![0, 1], + class_ids: vec![0, 0], + class_count: 1, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 2); + } + + #[test] + fn partitions_two_classes_require_connection() { + let input = PartitionInput { + elements: vec![0, 1], + class_ids: vec![0, 1], + class_count: 2, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 1); + assert_eq!(normalize(&partitions[0]), vec![vec![0, 1]]); + } + + #[test] + fn partitions_three_elements_two_classes() { + let input = PartitionInput { + elements: vec![0, 1, 2], + class_ids: vec![0, 0, 1], + class_count: 2, + }; + + let partitions = enumerate_partitions(&input); + assert_eq!(partitions.len(), 3); + + let mut actual = partitions.iter().map(normalize).collect::>(); + actual.sort(); + + let mut expected = vec![ + vec![vec![0, 1, 2]], + vec![vec![0, 2], vec![1]], + vec![vec![1, 2], vec![0]], + ] + .into_iter() + .map(|blocks| { + let mut blocks = blocks + .into_iter() + .map(|mut block| { + block.sort(); + block + }) + .collect::>(); + blocks.sort(); + blocks + }) + .collect::>(); + expected.sort(); + + assert_eq!(actual, expected); + } +} diff --git a/src/strict/hypergraph/mod.rs b/src/strict/hypergraph/mod.rs index 836341d..3c174ca 100644 --- a/src/strict/hypergraph/mod.rs +++ b/src/strict/hypergraph/mod.rs @@ -3,4 +3,5 @@ pub mod arrow; mod object; +pub use arrow::*; pub use object::*; diff --git a/src/union_find.rs b/src/union_find.rs new 
/// A union-find (disjoint-set) structure with union-by-size and an undo
/// history, so sequences of unions can be rolled back to an earlier snapshot.
///
/// `find` deliberately performs NO path compression: leaving parent pointers
/// untouched outside `union` is what makes rollback a simple reversal of the
/// recorded merges.
#[derive(Debug, Clone)]
pub struct UnionFind {
    /// parent[i] == i iff i is a root.
    parent: Vec<usize>,
    /// Size of the set rooted at i (meaningful only for roots).
    size: Vec<usize>,
    /// Log of operations performed by `union`, replayed in reverse by `rollback`.
    history: Vec<HistoryEntry>,
    /// Number of disjoint sets currently represented.
    components: usize,
}

#[derive(Debug, Clone)]
enum HistoryEntry {
    /// A union of two elements already in the same set; nothing to undo.
    Noop,
    /// `root` was attached under `parent`, whose size was `size_parent` before.
    Merge {
        root: usize,
        parent: usize,
        size_parent: usize,
    },
}

impl UnionFind {
    /// Create `n` singleton sets `{0}, {1}, …, {n-1}`.
    pub fn new(n: usize) -> Self {
        Self {
            parent: (0..n).collect(),
            size: vec![1; n],
            history: Vec::new(),
            components: n,
        }
    }

    /// Total number of elements (not sets).
    pub fn len(&self) -> usize {
        self.parent.len()
    }

    /// True when the structure contains no elements.
    pub fn is_empty(&self) -> bool {
        self.parent.is_empty()
    }

    /// Number of disjoint sets.
    pub fn components(&self) -> usize {
        self.components
    }

    /// Return the root representative of `x`'s set.
    ///
    /// No path compression — see the type-level docs: mutating parent
    /// pointers here would break `rollback`'s minimal undo log.
    pub fn find(&mut self, x: usize) -> usize {
        let mut node = x;
        while self.parent[node] != node {
            node = self.parent[node];
        }
        node
    }

    /// Merge the sets containing `x` and `y`, recording the operation so it
    /// can be undone. Merging a set with itself is recorded as a no-op.
    pub fn union(&mut self, x: usize, y: usize) {
        let root_x = self.find(x);
        let root_y = self.find(y);
        if root_x == root_y {
            // Still push an entry so every `union` call costs exactly one
            // history slot — snapshots stay valid regardless of outcome.
            self.history.push(HistoryEntry::Noop);
            return;
        }

        // Union by size: the smaller root is attached under the larger.
        let (root, parent) = if self.size[root_x] >= self.size[root_y] {
            (root_y, root_x)
        } else {
            (root_x, root_y)
        };

        self.history.push(HistoryEntry::Merge {
            root,
            parent,
            size_parent: self.size[parent],
        });

        self.parent[root] = parent;
        self.size[parent] += self.size[root];
        self.components -= 1;
    }

    /// Capture the current history position; pass it to `rollback` to undo
    /// every union performed after this point.
    pub fn snapshot(&self) -> usize {
        self.history.len()
    }

    /// Undo all unions recorded after `snapshot`, restoring parents, sizes,
    /// and the component count.
    pub fn rollback(&mut self, snapshot: usize) {
        while self.history.len() > snapshot {
            match self.history.pop().expect("rollback history") {
                HistoryEntry::Noop => {}
                HistoryEntry::Merge {
                    root,
                    parent,
                    size_parent,
                } => {
                    self.parent[root] = root;
                    self.size[parent] = size_parent;
                    self.components += 1;
                }
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::UnionFind;

    #[test]
    fn union_find_unions_and_connects() {
        let mut uf = UnionFind::new(4);
        assert_eq!(uf.components(), 4);
        uf.union(0, 1);
        uf.union(2, 3);
        assert_eq!(uf.components(), 2);
        uf.union(1, 2);
        assert_eq!(uf.components(), 1);
    }

    #[test]
    fn union_find_snapshot_and_rollback() {
        let mut uf = UnionFind::new(3);
        uf.union(0, 1);
        let snap = uf.snapshot();
        uf.union(1, 2);
        assert_eq!(uf.components(), 1);
        uf.rollback(snap);
        assert_eq!(uf.components(), 2);
        uf.union(0, 2);
        assert_eq!(uf.components(), 1);
    }
}
a1s = remainder.adjacency[1].sources[0]; + let a1t = remainder.adjacency[1].targets[0]; + assert_ne!(a0s, a0t); + assert_ne!(a1s, a1t); + assert_ne!(a0s, a1s); + assert_ne!(a0s, a1t); + assert_ne!(a0t, a1s); + assert_ne!(a0t, a1t); +} + +#[test] +fn test_remainder_with_injection_empty_excluded_is_identity() { + let mut host = Hypergraph::empty(); + let w0 = host.new_node(1); + let w1 = host.new_node(2); + host.new_edge( + 10, + Hyperedge { + sources: vec![w0], + targets: vec![w1], + }, + ); + + let excluded = NodeEdgeMap { + nodes: FiniteFunction::::initial(host.nodes.len()), + edges: FiniteFunction::::initial(host.edges.len()), + }; + + let (remainder, remainder_in_host) = host.remainder_with_injection(&excluded); + + assert_eq!(remainder, host); + assert_eq!(remainder_in_host.nodes.table, VecArray(vec![0, 1])); + assert_eq!(remainder_in_host.edges.table, VecArray(vec![0])); +} + +#[test] +fn test_remainder_with_injection_excluded_edge_drops_edge_only() { + let mut host = Hypergraph::empty(); + let w0 = host.new_node(1); + let w1 = host.new_node(2); + host.new_edge( + 10, + Hyperedge { + sources: vec![w0], + targets: vec![w1], + }, + ); + + let excluded = NodeEdgeMap { + nodes: FiniteFunction::::initial(host.nodes.len()), + edges: FiniteFunction::::new(VecArray(vec![0]), host.edges.len()).unwrap(), + }; + + let (remainder, remainder_in_host) = host.remainder_with_injection(&excluded); + + assert_eq!(remainder.nodes, host.nodes); + assert!(remainder.edges.is_empty()); + assert!(remainder.adjacency.is_empty()); + assert_eq!(remainder_in_host.nodes.table, VecArray(vec![0, 1])); + assert!(remainder_in_host.edges.table.is_empty()); +} diff --git a/tests/lax/mod.rs b/tests/lax/mod.rs index a3a11c8..4f3250b 100644 --- a/tests/lax/mod.rs +++ b/tests/lax/mod.rs @@ -1,2 +1,3 @@ pub mod eval; pub mod hypergraph; +pub mod rewrite; diff --git a/tests/lax/rewrite.rs b/tests/lax/rewrite.rs new file mode 100644 index 0000000..709b5a4 --- /dev/null +++ b/tests/lax/rewrite.rs @@ 
-0,0 +1,95 @@ +use open_hypergraphs::array::vec::{VecArray, VecKind}; +use open_hypergraphs::finite_function::FiniteFunction; +use open_hypergraphs::lax::{rewrite, Hyperedge, Hypergraph, NodeEdgeMap, Span}; + +fn empty_map(target: usize) -> FiniteFunction { + FiniteFunction::::new(VecArray(vec![]), target).unwrap() +} + +fn span_with_empty_apex( + left: &Hypergraph, + right: &Hypergraph, +) -> (Hypergraph, NodeEdgeMap, NodeEdgeMap) { + let apex: Hypergraph = Hypergraph::empty(); + let left_map = NodeEdgeMap { + nodes: empty_map(left.nodes.len()), + edges: empty_map(left.edges.len()), + }; + let right_map = NodeEdgeMap { + nodes: empty_map(right.nodes.len()), + edges: empty_map(right.edges.len()), + }; + (apex, left_map, right_map) +} + +#[test] +fn test_rewrite_identification_fails() { + // K = ∅, L = {a, b}. G has one node w and m(a) = m(b) = w. + let mut l: Hypergraph = Hypergraph::empty(); + l.new_node(1); + l.new_node(1); + + let r: Hypergraph = Hypergraph::empty(); + let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); + + let mut g: Hypergraph = Hypergraph::empty(); + g.new_node(1); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0, 0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + assert!(rewrite(&g, &rule, &candidate).is_empty()); +} + +#[test] +fn test_rewrite_dangling_fails() { + // K = ∅, L = {u}. G = {v} with a loop edge e: v -> v, and m(u) = v. 
+ let mut l: Hypergraph = Hypergraph::empty(); + l.new_node(1); + + let r: Hypergraph = Hypergraph::empty(); + let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); + + let mut g: Hypergraph = Hypergraph::empty(); + let v = g.new_node(1); + g.new_edge( + 10, + Hyperedge { + sources: vec![v], + targets: vec![v], + }, + ); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + assert!(rewrite(&g, &rule, &candidate).is_empty()); +} + +#[test] +fn test_rewrite_gluing_ok() { + // K = ∅, L = {u}. G = {v} with no edges, and m(u) = v. + let mut l: Hypergraph = Hypergraph::empty(); + l.new_node(1); + + let r: Hypergraph = Hypergraph::empty(); + let (apex, left_map, right_map) = span_with_empty_apex(&l, &r); + let rule = Span::new(&apex, &l, &r, &left_map, &right_map); + + let mut g: Hypergraph = Hypergraph::empty(); + g.new_node(1); + + let candidate = NodeEdgeMap { + nodes: FiniteFunction::::new(VecArray(vec![0]), g.nodes.len()).unwrap(), + edges: empty_map(g.edges.len()), + }; + + let complements = rewrite(&g, &rule, &candidate); + assert!(!complements.is_empty()); +}