From 5995f69cae8f4b65590c4d7c0dd38a0e2a5baead Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 20 Jan 2026 11:02:34 +0100 Subject: [PATCH 01/16] basic vf2 with backtracking --- src/lax/hypergraph.rs | 1 + src/lax/mod.rs | 1 + src/lax/subgraph.rs | 345 ++++++++++++++++++++++++++++++++++++++++++ tests/lax/mod.rs | 1 + tests/lax/subgraph.rs | 68 +++++++++ 5 files changed, 416 insertions(+) create mode 100644 src/lax/subgraph.rs create mode 100644 tests/lax/subgraph.rs diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 08f1a06..ffa2a17 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -280,6 +280,7 @@ impl Hypergraph { } } + impl Hypergraph { /// Construct a [`Hypergraph`] by identifying nodes in the quotient map. /// Mutably quotient this [`Hypergraph`], returning the coequalizer calculated from `self.quotient`. diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 4cf746c..1dd8b9b 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -74,6 +74,7 @@ pub mod category; pub mod functor; pub mod hypergraph; +pub mod subgraph; pub mod mut_category; pub mod open_hypergraph; diff --git a/src/lax/subgraph.rs b/src/lax/subgraph.rs new file mode 100644 index 0000000..87bdc5c --- /dev/null +++ b/src/lax/subgraph.rs @@ -0,0 +1,345 @@ +use super::hypergraph::{EdgeId, Hypergraph, NodeId}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SubgraphIsomorphism { + node_map: Vec, + edge_map: Vec, +} + +impl SubgraphIsomorphism { + pub fn node_map(&self) -> &[NodeId] { + &self.node_map + } + + pub fn edge_map(&self) -> &[EdgeId] { + &self.edge_map + } +} + +impl Hypergraph { + /// Find all subgraph isomorphisms from `pattern` into `self`. + /// + /// This uses a VF2-style backtracking search over edges, then assigns any isolated nodes. + /// The quotient map is ignored; run `quotient` first if you want strict matching. + pub fn find_subgraph_isomorphisms_by( + &self, + pattern: &Hypergraph, + node_eq: FN, + edge_eq: FE, + ) -> Vec + where + FN: Fn(&OP, &O) -> bool, + FE: Fn(&AP, &A) -> bool, + { + find_subgraph_isomorphisms_impl(self, pattern, &node_eq, &edge_eq) + } +} + +impl Hypergraph { + /// Find all subgraph isomorphisms from `pattern` into `self` by label equality. + pub fn find_subgraph_isomorphisms( + &self, + pattern: &Hypergraph, + ) -> Vec { + self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) + } +} + +fn find_subgraph_isomorphisms_impl( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + edge_eq: &FE, +) -> Vec +where + FN: Fn(&OP, &O) -> bool, + FE: Fn(&AP, &A) -> bool, +{ + use std::cmp::Reverse; + + // Quick cardinality check before doing any work. + if pattern.nodes.len() > target.nodes.len() || pattern.edges.len() > target.edges.len() { + return Vec::new(); + } + + // Precompute candidate target edges for each pattern edge. + // `edge_candidates[p]` is the list of target edge indices that match `p` by label and arity. + // Rationale: pruning by label/arity early shrinks the search tree dramatically, which is + // the key idea behind VF2-style matching. 
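+    // A pattern edge with an empty candidate list makes any match impossible, so the loop
+    // below returns early in that case; the candidate lists also drive the search order later.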
+ let mut edge_candidates = Vec::with_capacity(pattern.edges.len()); + for (p_edge_idx, p_label) in pattern.edges.iter().enumerate() { + let p_adj = &pattern.adjacency[p_edge_idx]; + let mut candidates = Vec::new(); + for (t_edge_idx, t_label) in target.edges.iter().enumerate() { + if !edge_eq(p_label, t_label) { + continue; + } + let t_adj = &target.adjacency[t_edge_idx]; + if p_adj.sources.len() != t_adj.sources.len() + || p_adj.targets.len() != t_adj.targets.len() + { + continue; + } + candidates.push(t_edge_idx); + } + if candidates.is_empty() && !pattern.edges.is_empty() { + return Vec::new(); + } + edge_candidates.push(candidates); + } + + // Explore edges with fewer candidates first (and higher arity as a tie-breaker). + // Rationale: "fail fast" ordering reduces backtracking when constraints are tight. + let mut edge_order: Vec = (0..pattern.edges.len()).collect(); + edge_order.sort_by_key(|&edge_idx| { + let arity = + pattern.adjacency[edge_idx].sources.len() + pattern.adjacency[edge_idx].targets.len(); + (edge_candidates[edge_idx].len(), Reverse(arity)) + }); + + // Track isolated nodes so we can assign them after edge mapping. + // Rationale: edge constraints are strongest; assigning isolated nodes earlier just + // multiplies possibilities without adding pruning power. + let mut node_in_edge = vec![false; pattern.nodes.len()]; + for edge in &pattern.adjacency { + for node in edge.sources.iter().chain(edge.targets.iter()) { + node_in_edge[node.0] = true; + } + } + let isolated_nodes: Vec = node_in_edge + .iter() + .enumerate() + .filter_map(|(idx, used)| if *used { None } else { Some(idx) }) + .collect(); + + // Mutable state for the backtracking search. + // Rationale: we mutate maps/used flags in-place and roll back to avoid repeated allocation. + let mut matches = Vec::new(); + let mut node_map = vec![None; pattern.nodes.len()]; + let mut edge_map = vec![None; pattern.edges.len()]; + let mut used_target_nodes = vec![false; target.nodes.len()]; + let mut used_target_edges = vec![false; target.edges.len()]; + + backtrack_edges( + target, + pattern, + node_eq, + &edge_order, + &edge_candidates, + 0, + &isolated_nodes, + &mut node_map, + &mut edge_map, + &mut used_target_nodes, + &mut used_target_edges, + &mut matches, + ); + + matches +} + +#[allow(clippy::too_many_arguments)] +fn backtrack_edges( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + edge_order: &[usize], + edge_candidates: &[Vec], + edge_index: usize, + isolated_nodes: &[usize], + node_map: &mut Vec>, + edge_map: &mut Vec>, + used_target_nodes: &mut Vec, + used_target_edges: &mut Vec, + matches: &mut Vec, +) where + FN: Fn(&OP, &O) -> bool, +{ + // If all edges are mapped, fill in remaining isolated nodes. + // Rationale: at this point only label/injectivity constraints remain. + if edge_index == edge_order.len() { + backtrack_isolated_nodes( + target, + pattern, + node_eq, + isolated_nodes, + 0, + node_map, + edge_map, + used_target_nodes, + matches, + ); + return; + } + + let p_edge_idx = edge_order[edge_index]; + let p_adj = &pattern.adjacency[p_edge_idx]; + + for &t_edge_idx in &edge_candidates[p_edge_idx] { + if used_target_edges[t_edge_idx] { + continue; + } + let t_adj = &target.adjacency[t_edge_idx]; + + // Optimistically map nodes along this edge; rollback on failure. + // Rationale: we try to extend the partial mapping and undo if any constraint breaks. 
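+        // `newly_mapped` records only the bindings created while trying this candidate edge,
+        // so the rollback at the bottom of the loop undoes exactly those and nothing else.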
+ let mut newly_mapped = Vec::new(); + let mut ok = true; + + for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { + if !try_map_node( + target, + pattern, + node_eq, + p_node.0, + t_node.0, + node_map, + used_target_nodes, + &mut newly_mapped, + ) { + ok = false; + break; + } + } + + if ok { + for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { + if !try_map_node( + target, + pattern, + node_eq, + p_node.0, + t_node.0, + node_map, + used_target_nodes, + &mut newly_mapped, + ) { + ok = false; + break; + } + } + } + + if ok { + used_target_edges[t_edge_idx] = true; + edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); + + backtrack_edges( + target, + pattern, + node_eq, + edge_order, + edge_candidates, + edge_index + 1, + isolated_nodes, + node_map, + edge_map, + used_target_nodes, + used_target_edges, + matches, + ); + + edge_map[p_edge_idx] = None; + used_target_edges[t_edge_idx] = false; + } + + // Revert any provisional node mappings for this candidate edge. + // Rationale: keep the search state consistent for the next candidate. + for p_node_idx in newly_mapped.drain(..) { + let t_node_idx = node_map[p_node_idx].unwrap().0; + node_map[p_node_idx] = None; + used_target_nodes[t_node_idx] = false; + } + } +} + +fn backtrack_isolated_nodes( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + isolated_nodes: &[usize], + idx: usize, + node_map: &mut Vec>, + edge_map: &mut Vec>, + used_target_nodes: &mut Vec, + matches: &mut Vec, +) where + FN: Fn(&OP, &O) -> bool, +{ + // All isolated nodes assigned; record a complete match. + // Rationale: both edge and node constraints are satisfied at this point. + if idx == isolated_nodes.len() { + let node_map = node_map + .iter() + .map(|node| node.expect("pattern nodes must be mapped")) + .collect(); + let edge_map = edge_map + .iter() + .map(|edge| edge.expect("pattern edges must be mapped")) + .collect(); + matches.push(SubgraphIsomorphism { node_map, edge_map }); + return; + } + + let p_node_idx = isolated_nodes[idx]; + for t_node_idx in 0..target.nodes.len() { + if used_target_nodes[t_node_idx] { + continue; + } + if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { + continue; + } + + node_map[p_node_idx] = Some(NodeId(t_node_idx)); + used_target_nodes[t_node_idx] = true; + + backtrack_isolated_nodes( + target, + pattern, + node_eq, + isolated_nodes, + idx + 1, + node_map, + edge_map, + used_target_nodes, + matches, + ); + + used_target_nodes[t_node_idx] = false; + node_map[p_node_idx] = None; + } +} + +fn try_map_node( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + p_node_idx: usize, + t_node_idx: usize, + node_map: &mut Vec>, + used_target_nodes: &mut Vec, + newly_mapped: &mut Vec, +) -> bool +where + FN: Fn(&OP, &O) -> bool, +{ + // Try to extend the node mapping with (pattern_node -> target_node). + // If the pattern node is already mapped, this only succeeds when it maps to the same target. + // Otherwise, it checks injectivity and label compatibility before recording the mapping. 
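+    // Already-mapped pattern nodes are not pushed to `newly_mapped` again, which keeps the
+    // caller's rollback precise even when an edge repeats a node (e.g. sources `[p0, p0]`).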
+ if let Some(existing) = node_map[p_node_idx] { + return existing.0 == t_node_idx; + } + // injectivity: a target node can only be used once + if used_target_nodes[t_node_idx] { + return false; + } + // label compatibility: we only allow mapping when the node labels match + if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { + return false; + } + + node_map[p_node_idx] = Some(NodeId(t_node_idx)); + used_target_nodes[t_node_idx] = true; + newly_mapped.push(p_node_idx); + true +} diff --git a/tests/lax/mod.rs b/tests/lax/mod.rs index a3a11c8..21e76f4 100644 --- a/tests/lax/mod.rs +++ b/tests/lax/mod.rs @@ -1,2 +1,3 @@ pub mod eval; pub mod hypergraph; +pub mod subgraph; diff --git a/tests/lax/subgraph.rs b/tests/lax/subgraph.rs new file mode 100644 index 0000000..6e99e74 --- /dev/null +++ b/tests/lax/subgraph.rs @@ -0,0 +1,68 @@ +use open_hypergraphs::lax::{EdgeId, Hyperedge, Hypergraph, NodeId}; + +#[test] +fn test_subgraph_isomorphisms_single_edge() { + let mut target = Hypergraph::empty(); + let t0 = target.new_node(0); + let t1 = target.new_node(1); + let t2 = target.new_node(0); + target.new_edge('f', (vec![t0], vec![t1])); + target.new_edge('f', (vec![t2], vec![t1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + pattern.new_edge('f', (vec![p0], vec![p1])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 2); + assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); + + let mut sources = matches.iter().map(|m| m.node_map()[0].0).collect::>(); + sources.sort(); + assert_eq!(sources, vec![0, 2]); + + for m in matches { + if m.node_map()[0] == NodeId(0) { + assert_eq!(m.edge_map()[0], EdgeId(0)); + } else { + assert_eq!(m.edge_map()[0], EdgeId(1)); + } + } +} + +#[test] +fn test_subgraph_isomorphisms_order_sensitive() { + let mut target = Hypergraph::empty(); + let t0 = target.new_node(0); + let t1 = target.new_node(1); + target.new_edge('f', (vec![t0, t1], vec![])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + pattern.new_edge('f', (vec![p1, p0], vec![])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert!(matches.is_empty()); +} + +#[test] +fn test_subgraph_isomorphisms_isolated_nodes() { + let mut target: Hypergraph = Hypergraph::empty(); + target.new_node(1); + target.new_node(2); + target.new_node(1); + + let mut pattern: Hypergraph = Hypergraph::empty(); + pattern.new_node(1); + pattern.new_node(2); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 2); + assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); + + let mut sources = matches.iter().map(|m| m.node_map()[0].0).collect::>(); + sources.sort(); + assert_eq!(sources, vec![0, 2]); +} From 33d149eb3b37abae00d5c789b3d9f1bec5a69202 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 20 Jan 2026 11:14:22 +0100 Subject: [PATCH 02/16] in/out pruning --- src/lax/subgraph.rs | 239 +++++++++++++++++++++++++++++++++++++++++- tests/lax/subgraph.rs | 80 +++++++++++++- 2 files changed, 315 insertions(+), 4 deletions(-) diff --git a/src/lax/subgraph.rs b/src/lax/subgraph.rs index 87bdc5c..c58c31f 100644 --- a/src/lax/subgraph.rs +++ b/src/lax/subgraph.rs @@ -112,6 +112,13 @@ where .filter_map(|(idx, used)| if *used { None } else { Some(idx) }) .collect(); + // VF2 feasibility checks used here: + // - Degree compatibility: in/out degree of each pattern node must not exceed the target's. 
+ // - Frontier capacity: remaining unmapped incident edges on a pattern node must fit within + // the remaining incident capacity on its mapped target node. + let (pattern_in, pattern_out) = node_degrees(pattern); + let (target_in, target_out) = node_degrees(target); + // Mutable state for the backtracking search. // Rationale: we mutate maps/used flags in-place and roll back to avoid repeated allocation. let mut matches = Vec::new(); @@ -119,6 +126,10 @@ where let mut edge_map = vec![None; pattern.edges.len()]; let mut used_target_nodes = vec![false; target.nodes.len()]; let mut used_target_edges = vec![false; target.edges.len()]; + let mut pattern_mapped_in = vec![0usize; pattern.nodes.len()]; + let mut pattern_mapped_out = vec![0usize; pattern.nodes.len()]; + let mut target_mapped_in = vec![0usize; target.nodes.len()]; + let mut target_mapped_out = vec![0usize; target.nodes.len()]; backtrack_edges( target, @@ -132,6 +143,14 @@ where &mut edge_map, &mut used_target_nodes, &mut used_target_edges, + &pattern_in, + &pattern_out, + &target_in, + &target_out, + &mut pattern_mapped_in, + &mut pattern_mapped_out, + &mut target_mapped_in, + &mut target_mapped_out, &mut matches, ); @@ -151,6 +170,14 @@ fn backtrack_edges( edge_map: &mut Vec>, used_target_nodes: &mut Vec, used_target_edges: &mut Vec, + pattern_in: &[usize], + pattern_out: &[usize], + target_in: &[usize], + target_out: &[usize], + pattern_mapped_in: &mut [usize], + pattern_mapped_out: &mut [usize], + target_mapped_in: &mut [usize], + target_mapped_out: &mut [usize], matches: &mut Vec, ) where FN: Fn(&OP, &O) -> bool, @@ -167,6 +194,14 @@ fn backtrack_edges( node_map, edge_map, used_target_nodes, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, matches, ); return; @@ -193,8 +228,18 @@ fn backtrack_edges( node_eq, p_node.0, t_node.0, + 0, + 1, node_map, used_target_nodes, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, &mut newly_mapped, ) { ok = false; @@ -210,8 +255,18 @@ fn backtrack_edges( node_eq, p_node.0, t_node.0, + 1, + 0, node_map, used_target_nodes, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, &mut newly_mapped, ) { ok = false; @@ -223,6 +278,20 @@ fn backtrack_edges( if ok { used_target_edges[t_edge_idx] = true; edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); + apply_edge_incidence( + &p_adj.sources, + &p_adj.targets, + pattern_mapped_in, + pattern_mapped_out, + 1, + ); + apply_edge_incidence( + &t_adj.sources, + &t_adj.targets, + target_mapped_in, + target_mapped_out, + 1, + ); backtrack_edges( target, @@ -236,11 +305,33 @@ fn backtrack_edges( edge_map, used_target_nodes, used_target_edges, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, matches, ); edge_map[p_edge_idx] = None; used_target_edges[t_edge_idx] = false; + apply_edge_incidence( + &p_adj.sources, + &p_adj.targets, + pattern_mapped_in, + pattern_mapped_out, + -1, + ); + apply_edge_incidence( + &t_adj.sources, + &t_adj.targets, + target_mapped_in, + target_mapped_out, + -1, + ); } // Revert any provisional node mappings for this candidate edge. 
@@ -262,6 +353,14 @@ fn backtrack_isolated_nodes( node_map: &mut Vec>, edge_map: &mut Vec>, used_target_nodes: &mut Vec, + pattern_in: &[usize], + pattern_out: &[usize], + target_in: &[usize], + target_out: &[usize], + pattern_mapped_in: &mut [usize], + pattern_mapped_out: &mut [usize], + target_mapped_in: &mut [usize], + target_mapped_out: &mut [usize], matches: &mut Vec, ) where FN: Fn(&OP, &O) -> bool, @@ -286,6 +385,22 @@ fn backtrack_isolated_nodes( if used_target_nodes[t_node_idx] { continue; } + if !degree_feasible( + p_node_idx, + t_node_idx, + 0, + 0, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, + ) { + continue; + } if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { continue; } @@ -302,6 +417,14 @@ fn backtrack_isolated_nodes( node_map, edge_map, used_target_nodes, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, matches, ); @@ -316,8 +439,18 @@ fn try_map_node( node_eq: &FN, p_node_idx: usize, t_node_idx: usize, + add_in: usize, + add_out: usize, node_map: &mut Vec>, used_target_nodes: &mut Vec, + pattern_in: &[usize], + pattern_out: &[usize], + target_in: &[usize], + target_out: &[usize], + pattern_mapped_in: &mut [usize], + pattern_mapped_out: &mut [usize], + target_mapped_in: &mut [usize], + target_mapped_out: &mut [usize], newly_mapped: &mut Vec, ) -> bool where @@ -327,7 +460,23 @@ where // If the pattern node is already mapped, this only succeeds when it maps to the same target. // Otherwise, it checks injectivity and label compatibility before recording the mapping. if let Some(existing) = node_map[p_node_idx] { - return existing.0 == t_node_idx; + if existing.0 != t_node_idx { + return false; + } + return degree_feasible( + p_node_idx, + t_node_idx, + add_in, + add_out, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, + ); } // injectivity: a target node can only be used once if used_target_nodes[t_node_idx] { @@ -337,9 +486,97 @@ where if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { return false; } + if !degree_feasible( + p_node_idx, + t_node_idx, + add_in, + add_out, + pattern_in, + pattern_out, + target_in, + target_out, + pattern_mapped_in, + pattern_mapped_out, + target_mapped_in, + target_mapped_out, + ) { + return false; + } node_map[p_node_idx] = Some(NodeId(t_node_idx)); used_target_nodes[t_node_idx] = true; newly_mapped.push(p_node_idx); true } + +fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { + let mut in_deg = vec![0usize; graph.nodes.len()]; + let mut out_deg = vec![0usize; graph.nodes.len()]; + for edge in &graph.adjacency { + for node in &edge.sources { + out_deg[node.0] += 1; + } + for node in &edge.targets { + in_deg[node.0] += 1; + } + } + (in_deg, out_deg) +} + +fn apply_edge_incidence( + sources: &[NodeId], + targets: &[NodeId], + mapped_in: &mut [usize], + mapped_out: &mut [usize], + delta: i32, +) { + if delta >= 0 { + let add = delta as usize; + for node in sources { + mapped_out[node.0] += add; + } + for node in targets { + mapped_in[node.0] += add; + } + } else { + let sub = (-delta) as usize; + for node in sources { + mapped_out[node.0] -= sub; + } + for node in targets { + mapped_in[node.0] -= sub; + } + } +} + +fn degree_feasible( + p_node_idx: usize, + t_node_idx: usize, + add_in: usize, + add_out: usize, + pattern_in: 
&[usize], + pattern_out: &[usize], + target_in: &[usize], + target_out: &[usize], + pattern_mapped_in: &[usize], + pattern_mapped_out: &[usize], + target_mapped_in: &[usize], + target_mapped_out: &[usize], +) -> bool { + if pattern_in[p_node_idx] > target_in[t_node_idx] + || pattern_out[p_node_idx] > target_out[t_node_idx] + { + return false; + } + + let pattern_remaining_in = pattern_in[p_node_idx] + .saturating_sub(pattern_mapped_in[p_node_idx] + add_in); + let pattern_remaining_out = pattern_out[p_node_idx] + .saturating_sub(pattern_mapped_out[p_node_idx] + add_out); + let target_remaining_in = target_in[t_node_idx] + .saturating_sub(target_mapped_in[t_node_idx] + add_in); + let target_remaining_out = target_out[t_node_idx] + .saturating_sub(target_mapped_out[t_node_idx] + add_out); + + pattern_remaining_in <= target_remaining_in && pattern_remaining_out <= target_remaining_out +} diff --git a/tests/lax/subgraph.rs b/tests/lax/subgraph.rs index 6e99e74..da7889e 100644 --- a/tests/lax/subgraph.rs +++ b/tests/lax/subgraph.rs @@ -1,4 +1,4 @@ -use open_hypergraphs::lax::{EdgeId, Hyperedge, Hypergraph, NodeId}; +use open_hypergraphs::lax::{EdgeId, Hypergraph, NodeId}; #[test] fn test_subgraph_isomorphisms_single_edge() { @@ -18,7 +18,10 @@ fn test_subgraph_isomorphisms_single_edge() { assert_eq!(matches.len(), 2); assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); - let mut sources = matches.iter().map(|m| m.node_map()[0].0).collect::>(); + let mut sources = matches + .iter() + .map(|m| m.node_map()[0].0) + .collect::>(); sources.sort(); assert_eq!(sources, vec![0, 2]); @@ -62,7 +65,78 @@ fn test_subgraph_isomorphisms_isolated_nodes() { assert_eq!(matches.len(), 2); assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); - let mut sources = matches.iter().map(|m| m.node_map()[0].0).collect::>(); + let mut sources = matches + .iter() + .map(|m| m.node_map()[0].0) + .collect::>(); sources.sort(); assert_eq!(sources, vec![0, 2]); } + +#[test] +fn test_subgraph_isomorphisms_shared_nodes() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + let n2 = target.new_node(2); + target.new_edge('g', (vec![n0], vec![n1])); + target.new_edge('h', (vec![n1], vec![n2])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + let p2 = pattern.new_node(2); + pattern.new_edge('g', (vec![p0], vec![p1])); + pattern.new_edge('h', (vec![p1], vec![p2])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 1); +} + +#[test] +fn test_subgraph_isomorphisms_arity_mismatch() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + target.new_edge('f', (vec![n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + let p2 = pattern.new_node(2); + pattern.new_edge('f', (vec![p0, p1], vec![p2])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert!(matches.is_empty()); +} + +#[test] +fn test_subgraph_isomorphisms_degree_feasible_prune() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + target.new_edge('a', (vec![n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + let p2 = pattern.new_node(2); + pattern.new_edge('a', (vec![p0], vec![p1])); + pattern.new_edge('b', (vec![p0], vec![p2])); + + let 
matches = target.find_subgraph_isomorphisms(&pattern); + assert!(matches.is_empty()); +} + +#[test] +fn test_subgraph_isomorphisms_empty_pattern() { + let mut target = Hypergraph::empty(); + target.new_node(1); + target.new_node(2); + + let pattern: Hypergraph = Hypergraph::empty(); + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 1); + assert!(matches[0].node_map().is_empty()); + assert!(matches[0].edge_map().is_empty()); +} From 4f796bbfc49ce06264472683b40eb22f2c93b0f3 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 20 Jan 2026 11:41:40 +0100 Subject: [PATCH 03/16] more tests --- tests/lax/subgraph.rs | 94 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/tests/lax/subgraph.rs b/tests/lax/subgraph.rs index da7889e..861a6b2 100644 --- a/tests/lax/subgraph.rs +++ b/tests/lax/subgraph.rs @@ -140,3 +140,97 @@ fn test_subgraph_isomorphisms_empty_pattern() { assert!(matches[0].node_map().is_empty()); assert!(matches[0].edge_map().is_empty()); } + +#[test] +fn test_subgraph_isomorphisms_multi_incidence_sources() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + target.new_edge('f', (vec![n0, n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + pattern.new_edge('f', (vec![p0, p0], vec![p1])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 1); + assert_eq!(matches[0].node_map()[0], n0); + assert_eq!(matches[0].node_map()[1], n1); + assert_eq!(matches[0].edge_map()[0], EdgeId(0)); +} + +#[test] +fn test_subgraph_isomorphisms_node_in_sources_and_targets() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + target.new_edge('g', (vec![n0], vec![n0])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + pattern.new_edge('g', (vec![p0], vec![p0])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 1); + assert_eq!(matches[0].node_map()[0], n0); + assert_eq!(matches[0].edge_map()[0], EdgeId(0)); +} + +#[test] +fn test_subgraph_isomorphisms_identical_edges_injective() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + target.new_edge('h', (vec![n0], vec![n1])); + target.new_edge('h', (vec![n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + pattern.new_edge('h', (vec![p0], vec![p1])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 2); + let mut edge_ids = matches.iter().map(|m| m.edge_map()[0].0).collect::>(); + edge_ids.sort(); + assert_eq!(edge_ids, vec![0, 1]); +} + +#[test] +fn test_subgraph_isomorphisms_two_identical_edges_bijective() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(1); + target.new_edge('h', (vec![n0], vec![n1])); + target.new_edge('h', (vec![n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + pattern.new_edge('h', (vec![p0], vec![p1])); + pattern.new_edge('h', (vec![p0], vec![p1])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 2); + let mut edge_maps = matches + .iter() + .map(|m| (m.edge_map()[0].0, m.edge_map()[1].0)) + .collect::>(); + edge_maps.sort(); + assert_eq!(edge_maps, vec![(0, 1), (1, 0)]); +} + 
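+// Three identically labelled target nodes and two pattern nodes give 3 * 2 = 6 injective
+// assignments, hence six matches.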
+#[test] +fn test_subgraph_isomorphisms_isolated_nodes_duplicate_labels() { + let mut target: Hypergraph = Hypergraph::empty(); + target.new_node(1); + target.new_node(1); + target.new_node(1); + + let mut pattern: Hypergraph = Hypergraph::empty(); + pattern.new_node(1); + pattern.new_node(1); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 6); +} From 1a1a3e3be0014349a61b9b9c610790d246d53038 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 20 Jan 2026 16:36:55 +0100 Subject: [PATCH 04/16] more tests --- tests/lax/subgraph.rs | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/tests/lax/subgraph.rs b/tests/lax/subgraph.rs index 861a6b2..ed0705b 100644 --- a/tests/lax/subgraph.rs +++ b/tests/lax/subgraph.rs @@ -62,6 +62,7 @@ fn test_subgraph_isomorphisms_isolated_nodes() { pattern.new_node(2); let matches = target.find_subgraph_isomorphisms(&pattern); + // The pattern's 2-label must map to the unique 2 in the target; the 1-label can map to either 1. assert_eq!(matches.len(), 2); assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); @@ -160,6 +161,33 @@ fn test_subgraph_isomorphisms_multi_incidence_sources() { assert_eq!(matches[0].edge_map()[0], EdgeId(0)); } +#[test] +fn test_subgraph_isomorphisms_multiple_matches_complex_target() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(0); + let n2 = target.new_node(1); + let n3 = target.new_node(1); + let n4 = target.new_node(2); + target.new_edge('f', (vec![n0], vec![n2])); + target.new_edge('f', (vec![n0], vec![n3])); + target.new_edge('f', (vec![n1], vec![n2])); + target.new_edge('f', (vec![n1], vec![n3])); + target.new_edge('g', (vec![n2], vec![n4])); + target.new_edge('g', (vec![n3], vec![n4])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(1); + let p2 = pattern.new_node(2); + pattern.new_edge('f', (vec![p0], vec![p1])); + pattern.new_edge('g', (vec![p1], vec![p2])); + + let matches = target.find_subgraph_isomorphisms(&pattern); + assert_eq!(matches.len(), 4); + assert!(matches.iter().all(|m| m.node_map()[2] == n4)); +} + #[test] fn test_subgraph_isomorphisms_node_in_sources_and_targets() { let mut target = Hypergraph::empty(); @@ -191,7 +219,10 @@ fn test_subgraph_isomorphisms_identical_edges_injective() { let matches = target.find_subgraph_isomorphisms(&pattern); assert_eq!(matches.len(), 2); - let mut edge_ids = matches.iter().map(|m| m.edge_map()[0].0).collect::>(); + let mut edge_ids = matches + .iter() + .map(|m| m.edge_map()[0].0) + .collect::>(); edge_ids.sort(); assert_eq!(edge_ids, vec![0, 1]); } From e417caf35961f69854b958a34100ec8d1f9bc905 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 16:56:41 +0100 Subject: [PATCH 05/16] wip: exploring csp --- src/lax/csp.rs | 149 +++++++++++ src/lax/mod.rs | 1 + src/lax/subgraph.rs | 601 +++++++++++--------------------------------- 3 files changed, 301 insertions(+), 450 deletions(-) create mode 100644 src/lax/csp.rs diff --git a/src/lax/csp.rs b/src/lax/csp.rs new file mode 100644 index 0000000..598c9b8 --- /dev/null +++ b/src/lax/csp.rs @@ -0,0 +1,149 @@ +// A tiny, reusable CSP (Constraint Satisfaction Problem) engine. +// +// This is intentionally minimal: it provides variables with finite domains, +// a few constraint kinds, and a backtracking search that enumerates all solutions. +// The design keeps extension points (new constraints, search heuristics) simple. 
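+//
+// A minimal usage sketch (the variables and domains below are made up for illustration):
+//
+//     let mut csp = Csp::new();
+//     let x = csp.add_var(vec![0, 1]);
+//     let y = csp.add_var(vec![0, 1]);
+//     csp.add_all_different(vec![x, y]);
+//     // `solve_all` returns the two assignments satisfying the constraint: [0, 1] and [1, 0].
+//     let solutions = csp.solve_all();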
+ +pub type VarId = usize; + +pub struct Csp<'a> { + // Candidate values for each variable (indexed by VarId). + domains: Vec>, + // All constraints in the model; constraints reference variables by VarId. + constraints: Vec>, + // Reverse index so we can check only constraints that mention a variable. + constraints_by_var: Vec>, +} + +impl<'a> Csp<'a> { + pub fn new() -> Self { + Self { + domains: Vec::new(), + constraints: Vec::new(), + constraints_by_var: Vec::new(), + } + } + + pub fn add_var(&mut self, domain: Vec) -> VarId { + // Register a new variable and its allowed values. + let id = self.domains.len(); + self.domains.push(domain); + self.constraints_by_var.push(Vec::new()); + id + } + + pub fn add_all_different(&mut self, vars: Vec) { + // Enforce pairwise inequality across the given variables. + self.add_constraint(vars, ConstraintKind::AllDifferent); + } + + pub fn add_predicate(&mut self, vars: Vec, predicate: F) + where + F: Fn(&[Option]) -> bool + 'a, + { + // Add an n-ary constraint that inspects the partial assignment. + self.add_constraint(vars, ConstraintKind::Predicate(Box::new(predicate))); + } + + pub fn add_constraint(&mut self, vars: Vec, kind: ConstraintKind<'a>) { + // Store the constraint and index it by each referenced variable. + if matches!(kind, ConstraintKind::AllDifferent) && vars.len() <= 1 { + return; + } + let idx = self.constraints.len(); + self.constraints.push(Constraint { vars, kind }); + for &var in &self.constraints[idx].vars { + self.constraints_by_var[var].push(idx); + } + } + + pub fn solve_all(&self) -> Vec> { + // Depth-first search over assignments; enumerates all solutions. + let mut solutions = Vec::new(); + let mut assignment = vec![None; self.domains.len()]; + self.backtrack(&mut assignment, &mut solutions); + solutions + } + + fn backtrack(&self, assignment: &mut [Option], solutions: &mut Vec>) { + let Some(var) = self.select_unassigned_var(assignment) else { + // All variables assigned: record a concrete solution. + let solution = assignment + .iter() + .map(|value| value.expect("all variables assigned")) + .collect(); + solutions.push(solution); + return; + }; + + for &value in &self.domains[var] { + assignment[var] = Some(value); + if self.consistent(var, assignment) { + self.backtrack(assignment, solutions); + } + assignment[var] = None; + } + } + + // Smallest-domain-first heuristic (MRV) to fail fast. + fn select_unassigned_var(&self, assignment: &[Option]) -> Option { + let mut best: Option<(usize, VarId)> = None; + for (var, value) in assignment.iter().enumerate() { + if value.is_some() { + continue; + } + let domain_len = self.domains[var].len(); + match best { + None => best = Some((domain_len, var)), + Some((best_len, _)) if domain_len < best_len => best = Some((domain_len, var)), + _ => {} + } + } + best.map(|(_, var)| var) + } + + fn consistent(&self, var: VarId, assignment: &[Option]) -> bool { + // Check only constraints that mention the last-assigned variable. + for &constraint_idx in &self.constraints_by_var[var] { + if !self.constraints[constraint_idx].is_consistent(assignment) { + return false; + } + } + true + } +} + +struct Constraint<'a> { + vars: Vec, + kind: ConstraintKind<'a>, +} + +pub enum ConstraintKind<'a> { + AllDifferent, + Predicate(Box]) -> bool + 'a>), +} + +impl<'a> Constraint<'a> { + fn is_consistent(&self, assignment: &[Option]) -> bool { + match &self.kind { + ConstraintKind::AllDifferent => { + // Check that all assigned values are distinct (partial check is OK). 
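+                // Unassigned variables are skipped, so a partial assignment is never rejected
+                // merely because some of its variables are still open.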
+ for i in 0..self.vars.len() { + let Some(left) = assignment[self.vars[i]] else { + continue; + }; + for j in (i + 1)..self.vars.len() { + let Some(right) = assignment[self.vars[j]] else { + continue; + }; + if left == right { + return false; + } + } + } + true + } + ConstraintKind::Predicate(predicate) => predicate(assignment), + } + } +} diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 1dd8b9b..5d5d2c5 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -72,6 +72,7 @@ //! connected nodes, e.g., x0 and y0. this allows both *checking* (of e.g. equality) and //! *inference*: inequal types might be *unified* into a single type. pub mod category; +pub mod csp; pub mod functor; pub mod hypergraph; pub mod subgraph; diff --git a/src/lax/subgraph.rs b/src/lax/subgraph.rs index c58c31f..80483a5 100644 --- a/src/lax/subgraph.rs +++ b/src/lax/subgraph.rs @@ -1,3 +1,4 @@ +use super::csp::{Csp, VarId}; use super::hypergraph::{EdgeId, Hypergraph, NodeId}; #[derive(Debug, Clone, PartialEq, Eq)] @@ -19,7 +20,7 @@ impl SubgraphIsomorphism { impl Hypergraph { /// Find all subgraph isomorphisms from `pattern` into `self`. /// - /// This uses a VF2-style backtracking search over edges, then assigns any isolated nodes. + /// This encodes the matching problem as a small CSP and enumerates solutions. /// The quotient map is ignored; run `quotient` first if you want strict matching. pub fn find_subgraph_isomorphisms_by( &self, @@ -33,6 +34,24 @@ impl Hypergraph { { find_subgraph_isomorphisms_impl(self, pattern, &node_eq, &edge_eq) } + + /// Find all subgraph homomorphisms from `pattern` into `self`. + /// + /// This encodes the matching problem as a small CSP and enumerates solutions, + /// but does not enforce injectivity (mono) on nodes or edges. + /// The quotient map is ignored; run `quotient` first if you want strict matching. + pub fn find_subgraph_homomorphisms_by( + &self, + pattern: &Hypergraph, + node_eq: FN, + edge_eq: FE, + ) -> Vec + where + FN: Fn(&OP, &O) -> bool, + FE: Fn(&AP, &A) -> bool, + { + find_subgraph_homomorphisms_impl(self, pattern, &node_eq, &edge_eq) + } } impl Hypergraph { @@ -43,6 +62,27 @@ impl Hypergraph { ) -> Vec { self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) } + + /// Find all subgraph homomorphisms from `pattern` into `self` by label equality. + pub fn find_subgraph_homomorphisms( + &self, + pattern: &Hypergraph, + ) -> Vec { + self.find_subgraph_homomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) + } +} + +fn find_subgraph_homomorphisms_impl( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + edge_eq: &FE, +) -> Vec +where + FN: Fn(&OP, &O) -> bool, + FE: Fn(&AP, &A) -> bool, +{ + find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, false) } fn find_subgraph_isomorphisms_impl( @@ -55,8 +95,20 @@ where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - use std::cmp::Reverse; + find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, true) +} +fn find_subgraph_matches_impl( + target: &Hypergraph, + pattern: &Hypergraph, + node_eq: &FN, + edge_eq: &FE, + injective: bool, +) -> Vec +where + FN: Fn(&OP, &O) -> bool, + FE: Fn(&AP, &A) -> bool, +{ // Quick cardinality check before doing any work. if pattern.nodes.len() > target.nodes.len() || pattern.edges.len() > target.edges.len() { return Vec::new(); @@ -64,8 +116,6 @@ where // Precompute candidate target edges for each pattern edge. // `edge_candidates[p]` is the list of target edge indices that match `p` by label and arity. 
- // Rationale: pruning by label/arity early shrinks the search tree dramatically, which is - // the key idea behind VF2-style matching. let mut edge_candidates = Vec::with_capacity(pattern.edges.len()); for (p_edge_idx, p_label) in pattern.edges.iter().enumerate() { let p_adj = &pattern.adjacency[p_edge_idx]; @@ -88,425 +138,118 @@ where edge_candidates.push(candidates); } - // Explore edges with fewer candidates first (and higher arity as a tie-breaker). - // Rationale: "fail fast" ordering reduces backtracking when constraints are tight. - let mut edge_order: Vec = (0..pattern.edges.len()).collect(); - edge_order.sort_by_key(|&edge_idx| { - let arity = - pattern.adjacency[edge_idx].sources.len() + pattern.adjacency[edge_idx].targets.len(); - (edge_candidates[edge_idx].len(), Reverse(arity)) - }); - - // Track isolated nodes so we can assign them after edge mapping. - // Rationale: edge constraints are strongest; assigning isolated nodes earlier just - // multiplies possibilities without adding pruning power. - let mut node_in_edge = vec![false; pattern.nodes.len()]; - for edge in &pattern.adjacency { - for node in edge.sources.iter().chain(edge.targets.iter()) { - node_in_edge[node.0] = true; - } - } - let isolated_nodes: Vec = node_in_edge - .iter() - .enumerate() - .filter_map(|(idx, used)| if *used { None } else { Some(idx) }) - .collect(); - - // VF2 feasibility checks used here: + // Feasibility checks used here: // - Degree compatibility: in/out degree of each pattern node must not exceed the target's. - // - Frontier capacity: remaining unmapped incident edges on a pattern node must fit within - // the remaining incident capacity on its mapped target node. let (pattern_in, pattern_out) = node_degrees(pattern); let (target_in, target_out) = node_degrees(target); - // Mutable state for the backtracking search. - // Rationale: we mutate maps/used flags in-place and roll back to avoid repeated allocation. 
- let mut matches = Vec::new(); - let mut node_map = vec![None; pattern.nodes.len()]; - let mut edge_map = vec![None; pattern.edges.len()]; - let mut used_target_nodes = vec![false; target.nodes.len()]; - let mut used_target_edges = vec![false; target.edges.len()]; - let mut pattern_mapped_in = vec![0usize; pattern.nodes.len()]; - let mut pattern_mapped_out = vec![0usize; pattern.nodes.len()]; - let mut target_mapped_in = vec![0usize; target.nodes.len()]; - let mut target_mapped_out = vec![0usize; target.nodes.len()]; - - backtrack_edges( - target, - pattern, - node_eq, - &edge_order, - &edge_candidates, - 0, - &isolated_nodes, - &mut node_map, - &mut edge_map, - &mut used_target_nodes, - &mut used_target_edges, - &pattern_in, - &pattern_out, - &target_in, - &target_out, - &mut pattern_mapped_in, - &mut pattern_mapped_out, - &mut target_mapped_in, - &mut target_mapped_out, - &mut matches, - ); - - matches -} - -#[allow(clippy::too_many_arguments)] -fn backtrack_edges( - target: &Hypergraph, - pattern: &Hypergraph, - node_eq: &FN, - edge_order: &[usize], - edge_candidates: &[Vec], - edge_index: usize, - isolated_nodes: &[usize], - node_map: &mut Vec>, - edge_map: &mut Vec>, - used_target_nodes: &mut Vec, - used_target_edges: &mut Vec, - pattern_in: &[usize], - pattern_out: &[usize], - target_in: &[usize], - target_out: &[usize], - pattern_mapped_in: &mut [usize], - pattern_mapped_out: &mut [usize], - target_mapped_in: &mut [usize], - target_mapped_out: &mut [usize], - matches: &mut Vec, -) where - FN: Fn(&OP, &O) -> bool, -{ - // If all edges are mapped, fill in remaining isolated nodes. - // Rationale: at this point only label/injectivity constraints remain. - if edge_index == edge_order.len() { - backtrack_isolated_nodes( - target, - pattern, - node_eq, - isolated_nodes, - 0, - node_map, - edge_map, - used_target_nodes, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - matches, - ); - return; - } - - let p_edge_idx = edge_order[edge_index]; - let p_adj = &pattern.adjacency[p_edge_idx]; - - for &t_edge_idx in &edge_candidates[p_edge_idx] { - if used_target_edges[t_edge_idx] { - continue; - } - let t_adj = &target.adjacency[t_edge_idx]; - - // Optimistically map nodes along this edge; rollback on failure. - // Rationale: we try to extend the partial mapping and undo if any constraint breaks. - let mut newly_mapped = Vec::new(); - let mut ok = true; + // Build a small CSP: variables are pattern nodes and pattern edges. + // Constraints encode label/arity matching, incidence, and injectivity. + let mut csp = Csp::new(); - for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { - if !try_map_node( - target, - pattern, - node_eq, - p_node.0, - t_node.0, - 0, - 1, - node_map, - used_target_nodes, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - &mut newly_mapped, - ) { - ok = false; - break; + let mut node_vars = Vec::with_capacity(pattern.nodes.len()); + for (p_idx, p_label) in pattern.nodes.iter().enumerate() { + let mut domain = Vec::new(); + for (t_idx, t_label) in target.nodes.iter().enumerate() { + // Allow only label-compatible target nodes with sufficient degree. 
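+            // NOTE: this degree filter (like the cardinality check above) is only a safe
+            // over-approximation for injective matching; a homomorphism may send several
+            // pattern edges through the same target edge or node.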
+ if !node_eq(p_label, t_label) { + continue; } - } - - if ok { - for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { - if !try_map_node( - target, - pattern, - node_eq, - p_node.0, - t_node.0, - 1, - 0, - node_map, - used_target_nodes, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - &mut newly_mapped, - ) { - ok = false; - break; - } + if !degree_compatible( + p_idx, + t_idx, + &pattern_in, + &pattern_out, + &target_in, + &target_out, + ) { + continue; } + domain.push(t_idx); } - - if ok { - used_target_edges[t_edge_idx] = true; - edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); - apply_edge_incidence( - &p_adj.sources, - &p_adj.targets, - pattern_mapped_in, - pattern_mapped_out, - 1, - ); - apply_edge_incidence( - &t_adj.sources, - &t_adj.targets, - target_mapped_in, - target_mapped_out, - 1, - ); - - backtrack_edges( - target, - pattern, - node_eq, - edge_order, - edge_candidates, - edge_index + 1, - isolated_nodes, - node_map, - edge_map, - used_target_nodes, - used_target_edges, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - matches, - ); - - edge_map[p_edge_idx] = None; - used_target_edges[t_edge_idx] = false; - apply_edge_incidence( - &p_adj.sources, - &p_adj.targets, - pattern_mapped_in, - pattern_mapped_out, - -1, - ); - apply_edge_incidence( - &t_adj.sources, - &t_adj.targets, - target_mapped_in, - target_mapped_out, - -1, - ); - } - - // Revert any provisional node mappings for this candidate edge. - // Rationale: keep the search state consistent for the next candidate. - for p_node_idx in newly_mapped.drain(..) { - let t_node_idx = node_map[p_node_idx].unwrap().0; - node_map[p_node_idx] = None; - used_target_nodes[t_node_idx] = false; + // Empty domain means no match for this pattern node. + if domain.is_empty() { + return Vec::new(); } - } -} - -fn backtrack_isolated_nodes( - target: &Hypergraph, - pattern: &Hypergraph, - node_eq: &FN, - isolated_nodes: &[usize], - idx: usize, - node_map: &mut Vec>, - edge_map: &mut Vec>, - used_target_nodes: &mut Vec, - pattern_in: &[usize], - pattern_out: &[usize], - target_in: &[usize], - target_out: &[usize], - pattern_mapped_in: &mut [usize], - pattern_mapped_out: &mut [usize], - target_mapped_in: &mut [usize], - target_mapped_out: &mut [usize], - matches: &mut Vec, -) where - FN: Fn(&OP, &O) -> bool, -{ - // All isolated nodes assigned; record a complete match. - // Rationale: both edge and node constraints are satisfied at this point. - if idx == isolated_nodes.len() { - let node_map = node_map - .iter() - .map(|node| node.expect("pattern nodes must be mapped")) - .collect(); - let edge_map = edge_map - .iter() - .map(|edge| edge.expect("pattern edges must be mapped")) - .collect(); - matches.push(SubgraphIsomorphism { node_map, edge_map }); - return; + // Create a variable for this pattern node. 
+ let v = csp.add_var(domain); + node_vars.push(v); } - let p_node_idx = isolated_nodes[idx]; - for t_node_idx in 0..target.nodes.len() { - if used_target_nodes[t_node_idx] { - continue; - } - if !degree_feasible( - p_node_idx, - t_node_idx, - 0, - 0, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - ) { - continue; - } - if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { - continue; + let mut edge_vars = Vec::with_capacity(pattern.edges.len()); + for candidates in &edge_candidates { + // Edge variables are restricted to target edges with compatible label + arity. + if candidates.is_empty() && !pattern.edges.is_empty() { + return Vec::new(); } - - node_map[p_node_idx] = Some(NodeId(t_node_idx)); - used_target_nodes[t_node_idx] = true; - - backtrack_isolated_nodes( - target, - pattern, - node_eq, - isolated_nodes, - idx + 1, - node_map, - edge_map, - used_target_nodes, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - matches, - ); - - used_target_nodes[t_node_idx] = false; - node_map[p_node_idx] = None; + let e = csp.add_var(candidates.clone()); + edge_vars.push(e); } -} -fn try_map_node( - target: &Hypergraph, - pattern: &Hypergraph, - node_eq: &FN, - p_node_idx: usize, - t_node_idx: usize, - add_in: usize, - add_out: usize, - node_map: &mut Vec>, - used_target_nodes: &mut Vec, - pattern_in: &[usize], - pattern_out: &[usize], - target_in: &[usize], - target_out: &[usize], - pattern_mapped_in: &mut [usize], - pattern_mapped_out: &mut [usize], - target_mapped_in: &mut [usize], - target_mapped_out: &mut [usize], - newly_mapped: &mut Vec, -) -> bool -where - FN: Fn(&OP, &O) -> bool, -{ - // Try to extend the node mapping with (pattern_node -> target_node). - // If the pattern node is already mapped, this only succeeds when it maps to the same target. - // Otherwise, it checks injectivity and label compatibility before recording the mapping. - if let Some(existing) = node_map[p_node_idx] { - if existing.0 != t_node_idx { - return false; - } - return degree_feasible( - p_node_idx, - t_node_idx, - add_in, - add_out, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - ); - } - // injectivity: a target node can only be used once - if used_target_nodes[t_node_idx] { - return false; - } - // label compatibility: we only allow mapping when the node labels match - if !node_eq(&pattern.nodes[p_node_idx], &target.nodes[t_node_idx]) { - return false; + // Injective node/edge mapping for isomorphisms. + if injective { + csp.add_all_different(node_vars.clone()); + csp.add_all_different(edge_vars.clone()); } - if !degree_feasible( - p_node_idx, - t_node_idx, - add_in, - add_out, - pattern_in, - pattern_out, - target_in, - target_out, - pattern_mapped_in, - pattern_mapped_out, - target_mapped_in, - target_mapped_out, - ) { - return false; + + // Edge incidence constraints: edge variables and the nodes they touch must align. 
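+    // Concretely: if a pattern edge is assigned target edge t, its i-th source must be
+    // assigned the i-th source of t, and likewise for targets (matching is positional).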
+    for (p_edge_idx, p_adj) in pattern.adjacency.iter().enumerate() {
+        let edge_var = edge_vars[p_edge_idx];
+        let source_vars: Vec<VarId> = p_adj.sources.iter().map(|n| node_vars[n.0]).collect();
+        let target_vars: Vec<VarId> = p_adj.targets.iter().map(|n| node_vars[n.0]).collect();
+        let candidates = &edge_candidates[p_edge_idx];
+        let target_adjacency = &target.adjacency;
+
+        // Constraint spans one edge var plus all of its incident node vars.
+        let mut vars = Vec::with_capacity(1 + source_vars.len() + target_vars.len());
+        vars.push(edge_var);
+        vars.extend(source_vars.iter().copied());
+        vars.extend(target_vars.iter().copied());
+
+        // Feasible if the chosen target edge (or some candidate, if unassigned)
+        // matches all assigned incident node vars positionally.
+        csp.add_predicate(vars, move |assignment| {
+            let edge_value = assignment[edge_var];
+
+            let compatible = |t_edge_idx: usize, assignment: &[Option<usize>]| -> bool {
+                let t_adj = &target_adjacency[t_edge_idx];
+                for (var_id, t_node) in source_vars.iter().zip(t_adj.sources.iter()) {
+                    if let Some(value) = assignment[*var_id] {
+                        if value != t_node.0 {
+                            return false;
+                        }
+                    }
+                }
+                for (var_id, t_node) in target_vars.iter().zip(t_adj.targets.iter()) {
+                    if let Some(value) = assignment[*var_id] {
+                        if value != t_node.0 {
+                            return false;
+                        }
+                    }
+                }
+                true
+            };
+
+            match edge_value {
+                Some(t_edge_idx) => compatible(t_edge_idx, assignment),
+                None => candidates
+                    .iter()
+                    .copied()
+                    .any(|t_edge_idx| compatible(t_edge_idx, assignment)),
+            }
+        });
+    }
+
+    let solutions = csp.solve_all();
+    solutions
+        .into_iter()
+        .map(|solution| {
+            // Convert a CSP assignment into explicit node/edge maps.
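+            // Variables were created in pattern order, so reading the solution through
+            // `node_vars` / `edge_vars` yields maps indexed by the pattern's NodeId / EdgeId.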
+ let node_map = node_vars.iter().map(|&var| NodeId(solution[var])).collect(); + let edge_map = edge_vars.iter().map(|&var| EdgeId(solution[var])).collect(); + SubgraphIsomorphism { node_map, edge_map } + }) + .collect() } fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { @@ -523,60 +266,18 @@ fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { (in_deg, out_deg) } -fn apply_edge_incidence( - sources: &[NodeId], - targets: &[NodeId], - mapped_in: &mut [usize], - mapped_out: &mut [usize], - delta: i32, -) { - if delta >= 0 { - let add = delta as usize; - for node in sources { - mapped_out[node.0] += add; - } - for node in targets { - mapped_in[node.0] += add; - } - } else { - let sub = (-delta) as usize; - for node in sources { - mapped_out[node.0] -= sub; - } - for node in targets { - mapped_in[node.0] -= sub; - } - } -} - -fn degree_feasible( +fn degree_compatible( p_node_idx: usize, t_node_idx: usize, - add_in: usize, - add_out: usize, pattern_in: &[usize], pattern_out: &[usize], target_in: &[usize], target_out: &[usize], - pattern_mapped_in: &[usize], - pattern_mapped_out: &[usize], - target_mapped_in: &[usize], - target_mapped_out: &[usize], ) -> bool { if pattern_in[p_node_idx] > target_in[t_node_idx] || pattern_out[p_node_idx] > target_out[t_node_idx] { return false; } - - let pattern_remaining_in = pattern_in[p_node_idx] - .saturating_sub(pattern_mapped_in[p_node_idx] + add_in); - let pattern_remaining_out = pattern_out[p_node_idx] - .saturating_sub(pattern_mapped_out[p_node_idx] + add_out); - let target_remaining_in = target_in[t_node_idx] - .saturating_sub(target_mapped_in[t_node_idx] + add_in); - let target_remaining_out = target_out[t_node_idx] - .saturating_sub(target_mapped_out[t_node_idx] + add_out); - - pattern_remaining_in <= target_remaining_in && pattern_remaining_out <= target_remaining_out + true } From 6133f9deae7e57749547c751aad82243d0b3ce33 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 17:24:41 +0100 Subject: [PATCH 06/16] wip: remove generic csp, make it specific to hg --- src/lax/csp.rs | 149 ------------ src/lax/mod.rs | 1 - src/lax/subgraph.rs | 524 ++++++++++++++++++++++++++++++++++-------- tests/lax/subgraph.rs | 40 ++++ 4 files changed, 462 insertions(+), 252 deletions(-) delete mode 100644 src/lax/csp.rs diff --git a/src/lax/csp.rs b/src/lax/csp.rs deleted file mode 100644 index 598c9b8..0000000 --- a/src/lax/csp.rs +++ /dev/null @@ -1,149 +0,0 @@ -// A tiny, reusable CSP (Constraint Satisfaction Problem) engine. -// -// This is intentionally minimal: it provides variables with finite domains, -// a few constraint kinds, and a backtracking search that enumerates all solutions. -// The design keeps extension points (new constraints, search heuristics) simple. - -pub type VarId = usize; - -pub struct Csp<'a> { - // Candidate values for each variable (indexed by VarId). - domains: Vec>, - // All constraints in the model; constraints reference variables by VarId. - constraints: Vec>, - // Reverse index so we can check only constraints that mention a variable. - constraints_by_var: Vec>, -} - -impl<'a> Csp<'a> { - pub fn new() -> Self { - Self { - domains: Vec::new(), - constraints: Vec::new(), - constraints_by_var: Vec::new(), - } - } - - pub fn add_var(&mut self, domain: Vec) -> VarId { - // Register a new variable and its allowed values. 
- let id = self.domains.len(); - self.domains.push(domain); - self.constraints_by_var.push(Vec::new()); - id - } - - pub fn add_all_different(&mut self, vars: Vec) { - // Enforce pairwise inequality across the given variables. - self.add_constraint(vars, ConstraintKind::AllDifferent); - } - - pub fn add_predicate(&mut self, vars: Vec, predicate: F) - where - F: Fn(&[Option]) -> bool + 'a, - { - // Add an n-ary constraint that inspects the partial assignment. - self.add_constraint(vars, ConstraintKind::Predicate(Box::new(predicate))); - } - - pub fn add_constraint(&mut self, vars: Vec, kind: ConstraintKind<'a>) { - // Store the constraint and index it by each referenced variable. - if matches!(kind, ConstraintKind::AllDifferent) && vars.len() <= 1 { - return; - } - let idx = self.constraints.len(); - self.constraints.push(Constraint { vars, kind }); - for &var in &self.constraints[idx].vars { - self.constraints_by_var[var].push(idx); - } - } - - pub fn solve_all(&self) -> Vec> { - // Depth-first search over assignments; enumerates all solutions. - let mut solutions = Vec::new(); - let mut assignment = vec![None; self.domains.len()]; - self.backtrack(&mut assignment, &mut solutions); - solutions - } - - fn backtrack(&self, assignment: &mut [Option], solutions: &mut Vec>) { - let Some(var) = self.select_unassigned_var(assignment) else { - // All variables assigned: record a concrete solution. - let solution = assignment - .iter() - .map(|value| value.expect("all variables assigned")) - .collect(); - solutions.push(solution); - return; - }; - - for &value in &self.domains[var] { - assignment[var] = Some(value); - if self.consistent(var, assignment) { - self.backtrack(assignment, solutions); - } - assignment[var] = None; - } - } - - // Smallest-domain-first heuristic (MRV) to fail fast. - fn select_unassigned_var(&self, assignment: &[Option]) -> Option { - let mut best: Option<(usize, VarId)> = None; - for (var, value) in assignment.iter().enumerate() { - if value.is_some() { - continue; - } - let domain_len = self.domains[var].len(); - match best { - None => best = Some((domain_len, var)), - Some((best_len, _)) if domain_len < best_len => best = Some((domain_len, var)), - _ => {} - } - } - best.map(|(_, var)| var) - } - - fn consistent(&self, var: VarId, assignment: &[Option]) -> bool { - // Check only constraints that mention the last-assigned variable. - for &constraint_idx in &self.constraints_by_var[var] { - if !self.constraints[constraint_idx].is_consistent(assignment) { - return false; - } - } - true - } -} - -struct Constraint<'a> { - vars: Vec, - kind: ConstraintKind<'a>, -} - -pub enum ConstraintKind<'a> { - AllDifferent, - Predicate(Box]) -> bool + 'a>), -} - -impl<'a> Constraint<'a> { - fn is_consistent(&self, assignment: &[Option]) -> bool { - match &self.kind { - ConstraintKind::AllDifferent => { - // Check that all assigned values are distinct (partial check is OK). - for i in 0..self.vars.len() { - let Some(left) = assignment[self.vars[i]] else { - continue; - }; - for j in (i + 1)..self.vars.len() { - let Some(right) = assignment[self.vars[j]] else { - continue; - }; - if left == right { - return false; - } - } - } - true - } - ConstraintKind::Predicate(predicate) => predicate(assignment), - } - } -} diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 5d5d2c5..1dd8b9b 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -72,7 +72,6 @@ //! connected nodes, e.g., x0 and y0. this allows both *checking* (of e.g. equality) and //! 
*inference*: inequal types might be *unified* into a single type. pub mod category; -pub mod csp; pub mod functor; pub mod hypergraph; pub mod subgraph; diff --git a/src/lax/subgraph.rs b/src/lax/subgraph.rs index 80483a5..3f58fcf 100644 --- a/src/lax/subgraph.rs +++ b/src/lax/subgraph.rs @@ -1,4 +1,3 @@ -use super::csp::{Csp, VarId}; use super::hypergraph::{EdgeId, Hypergraph, NodeId}; #[derive(Debug, Clone, PartialEq, Eq)] @@ -20,7 +19,7 @@ impl SubgraphIsomorphism { impl Hypergraph { /// Find all subgraph isomorphisms from `pattern` into `self`. /// - /// This encodes the matching problem as a small CSP and enumerates solutions. + /// This uses an edge-first backtracking search specialized to hypergraphs. /// The quotient map is ignored; run `quotient` first if you want strict matching. pub fn find_subgraph_isomorphisms_by( &self, @@ -37,7 +36,7 @@ impl Hypergraph { /// Find all subgraph homomorphisms from `pattern` into `self`. /// - /// This encodes the matching problem as a small CSP and enumerates solutions, + /// This uses an edge-first backtracking search specialized to hypergraphs, /// but does not enforce injectivity (mono) on nodes or edges. /// The quotient map is ignored; run `quotient` first if you want strict matching. pub fn find_subgraph_homomorphisms_by( @@ -109,8 +108,8 @@ where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - // Quick cardinality check before doing any work. - if pattern.nodes.len() > target.nodes.len() || pattern.edges.len() > target.edges.len() { + let options = MatchOptions { injective }; + if !cardinality_feasible(pattern, target, &options) { return Vec::new(); } @@ -138,118 +137,389 @@ where edge_candidates.push(candidates); } - // Feasibility checks used here: - // - Degree compatibility: in/out degree of each pattern node must not exceed the target's. + // Precompute degrees for pruning in the injective case. let (pattern_in, pattern_out) = node_degrees(pattern); let (target_in, target_out) = node_degrees(target); - // Build a small CSP: variables are pattern nodes and pattern edges. - // Constraints encode label/arity matching, incidence, and injectivity. - let mut csp = Csp::new(); + // Explore edges with fewer candidates first (and higher arity as a tie-breaker). + // Rationale: "fail fast" ordering reduces backtracking when constraints are tight. + let mut edge_order: Vec = (0..pattern.edges.len()).collect(); + edge_order.sort_by_key(|&edge_idx| { + let arity = + pattern.adjacency[edge_idx].sources.len() + pattern.adjacency[edge_idx].targets.len(); + (edge_candidates[edge_idx].len(), std::cmp::Reverse(arity)) + }); - let mut node_vars = Vec::with_capacity(pattern.nodes.len()); - for (p_idx, p_label) in pattern.nodes.iter().enumerate() { - let mut domain = Vec::new(); - for (t_idx, t_label) in target.nodes.iter().enumerate() { - // Allow only label-compatible target nodes with sufficient degree. - if !node_eq(p_label, t_label) { - continue; - } - if !degree_compatible( - p_idx, - t_idx, - &pattern_in, - &pattern_out, - &target_in, - &target_out, - ) { - continue; - } - domain.push(t_idx); + // Track isolated nodes so we can assign them after edge mapping. + let mut node_in_edge = vec![false; pattern.nodes.len()]; + for edge in &pattern.adjacency { + for node in edge.sources.iter().chain(edge.targets.iter()) { + node_in_edge[node.0] = true; } - // Empty domain means no match for this pattern node. 
- if domain.is_empty() { - return Vec::new(); + } + let isolated_nodes: Vec = node_in_edge + .iter() + .enumerate() + .filter_map(|(idx, used)| if *used { None } else { Some(idx) }) + .collect(); + + let mut state = MatchState::new(pattern, target); + let context = MatchContext::new( + target, + pattern, + node_eq, + &edge_order, + &edge_candidates, + &isolated_nodes, + &pattern_in, + &pattern_out, + &target_in, + &target_out, + &options, + ); + let mut matches = Vec::new(); + + backtrack_edges(&context, 0, &mut state, &mut matches); + + matches +} + +fn cardinality_feasible( + pattern: &Hypergraph, + target: &Hypergraph, + options: &MatchOptions, +) -> bool { + if !options.injective { + return true; + } + pattern.nodes.len() <= target.nodes.len() && pattern.edges.len() <= target.edges.len() +} + +struct MatchOptions { + injective: bool, +} + +struct MatchContext<'a, OP, AP, O, A, FN> +where + FN: Fn(&OP, &O) -> bool, +{ + target: &'a Hypergraph, + pattern: &'a Hypergraph, + node_eq: &'a FN, + edge_order: &'a [usize], + edge_candidates: &'a [Vec], + isolated_nodes: &'a [usize], + pattern_in: &'a [usize], + pattern_out: &'a [usize], + target_in: &'a [usize], + target_out: &'a [usize], + options: &'a MatchOptions, +} + +impl<'a, OP, AP, O, A, FN> MatchContext<'a, OP, AP, O, A, FN> +where + FN: Fn(&OP, &O) -> bool, +{ + fn new( + target: &'a Hypergraph, + pattern: &'a Hypergraph, + node_eq: &'a FN, + edge_order: &'a [usize], + edge_candidates: &'a [Vec], + isolated_nodes: &'a [usize], + pattern_in: &'a [usize], + pattern_out: &'a [usize], + target_in: &'a [usize], + target_out: &'a [usize], + options: &'a MatchOptions, + ) -> Self { + Self { + target, + pattern, + node_eq, + edge_order, + edge_candidates, + isolated_nodes, + pattern_in, + pattern_out, + target_in, + target_out, + options, } - // Create a variable for this pattern node. - let v = csp.add_var(domain); - node_vars.push(v); } +} - let mut edge_vars = Vec::with_capacity(pattern.edges.len()); - for candidates in &edge_candidates { - // Edge variables are restricted to target edges with compatible label + arity. - if candidates.is_empty() && !pattern.edges.is_empty() { - return Vec::new(); +struct MatchState { + node_map: Vec>, + edge_map: Vec>, + used_target_nodes: Vec, + used_target_edges: Vec, + pattern_mapped_in: Vec, + pattern_mapped_out: Vec, + target_mapped_in: Vec, + target_mapped_out: Vec, +} + +impl MatchState { + fn new(pattern: &Hypergraph, target: &Hypergraph) -> Self { + Self { + node_map: vec![None; pattern.nodes.len()], + edge_map: vec![None; pattern.edges.len()], + used_target_nodes: vec![false; target.nodes.len()], + used_target_edges: vec![false; target.edges.len()], + pattern_mapped_in: vec![0usize; pattern.nodes.len()], + pattern_mapped_out: vec![0usize; pattern.nodes.len()], + target_mapped_in: vec![0usize; target.nodes.len()], + target_mapped_out: vec![0usize; target.nodes.len()], } - let e = csp.add_var(candidates.clone()); - edge_vars.push(e); } +} - // Injective node/edge mapping for isomorphisms. - if injective { - csp.add_all_different(node_vars.clone()); - csp.add_all_different(edge_vars.clone()); +fn backtrack_edges( + context: &MatchContext<'_, OP, AP, O, A, FN>, + edge_index: usize, + state: &mut MatchState, + matches: &mut Vec, +) where + FN: Fn(&OP, &O) -> bool, +{ + // If all edges are mapped, fill in remaining isolated nodes. 
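+ // At this point only node labels and (for isomorphisms) injectivity can still rule out a match.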
+ if edge_index == context.edge_order.len() { + backtrack_isolated_nodes(context, 0, state, matches); + return; } - // Edge incidence constraints: edge variables and the nodes they touch must align. - for (p_edge_idx, p_adj) in pattern.adjacency.iter().enumerate() { - let edge_var = edge_vars[p_edge_idx]; - let source_vars: Vec = p_adj.sources.iter().map(|n| node_vars[n.0]).collect(); - let target_vars: Vec = p_adj.targets.iter().map(|n| node_vars[n.0]).collect(); - let candidates = &edge_candidates[p_edge_idx]; - let target_adjacency = &target.adjacency; - - // Constraint spans one edge var plus all of its incident node vars. - let mut vars = Vec::with_capacity(1 + source_vars.len() + target_vars.len()); - vars.push(edge_var); - vars.extend(source_vars.iter().copied()); - vars.extend(target_vars.iter().copied()); - - // Feasible if the chosen target edge (or some candidate, if unassigned) - // matches all assigned incident node vars positionally. - csp.add_predicate(vars, move |assignment| { - let edge_value = assignment[edge_var]; - - let compatible = |t_edge_idx: usize, assignment: &[Option]| -> bool { - let t_adj = &target_adjacency[t_edge_idx]; - for (var_id, t_node) in source_vars.iter().zip(t_adj.sources.iter()) { - if let Some(value) = assignment[*var_id] { - if value != t_node.0 { - return false; - } - } - } - for (var_id, t_node) in target_vars.iter().zip(t_adj.targets.iter()) { - if let Some(value) = assignment[*var_id] { - if value != t_node.0 { - return false; - } - } + let p_edge_idx = context.edge_order[edge_index]; + let p_adj = &context.pattern.adjacency[p_edge_idx]; + + for &t_edge_idx in &context.edge_candidates[p_edge_idx] { + if context.options.injective && state.used_target_edges[t_edge_idx] { + continue; + } + let t_adj = &context.target.adjacency[t_edge_idx]; + + let mut newly_mapped = Vec::new(); + let mut ok = true; + + for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { + if !try_map_node( + context, + p_node.0, + t_node.0, + 0, + 1, + state, + &mut newly_mapped, + ) { + ok = false; + break; + } + } + + if ok { + for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { + if !try_map_node( + context, + p_node.0, + t_node.0, + 1, + 0, + state, + &mut newly_mapped, + ) { + ok = false; + break; } - true - }; - - match edge_value { - Some(t_edge_idx) => compatible(t_edge_idx, assignment), - None => candidates - .iter() - .copied() - .any(|t_edge_idx| compatible(t_edge_idx, assignment)), } - }); + } + + if ok { + state.edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); + if context.options.injective { + state.used_target_edges[t_edge_idx] = true; + apply_edge_incidence( + &p_adj.sources, + &p_adj.targets, + &mut state.pattern_mapped_in, + &mut state.pattern_mapped_out, + 1, + ); + apply_edge_incidence( + &t_adj.sources, + &t_adj.targets, + &mut state.target_mapped_in, + &mut state.target_mapped_out, + 1, + ); + } + + backtrack_edges(context, edge_index + 1, state, matches); + + state.edge_map[p_edge_idx] = None; + if context.options.injective { + state.used_target_edges[t_edge_idx] = false; + apply_edge_incidence( + &p_adj.sources, + &p_adj.targets, + &mut state.pattern_mapped_in, + &mut state.pattern_mapped_out, + -1, + ); + apply_edge_incidence( + &t_adj.sources, + &t_adj.targets, + &mut state.target_mapped_in, + &mut state.target_mapped_out, + -1, + ); + } + } + + for p_node_idx in newly_mapped.drain(..) 
{ + let t_node_idx = state.node_map[p_node_idx].unwrap().0; + state.node_map[p_node_idx] = None; + if context.options.injective { + state.used_target_nodes[t_node_idx] = false; + } + } + } +} + +fn backtrack_isolated_nodes( + context: &MatchContext<'_, OP, AP, O, A, FN>, + idx: usize, + state: &mut MatchState, + matches: &mut Vec, +) where + FN: Fn(&OP, &O) -> bool, +{ + if idx == context.isolated_nodes.len() { + let node_map = state + .node_map + .iter() + .map(|node| node.expect("pattern nodes must be mapped")) + .collect(); + let edge_map = state + .edge_map + .iter() + .map(|edge| edge.expect("pattern edges must be mapped")) + .collect(); + matches.push(SubgraphIsomorphism { node_map, edge_map }); + return; + } + + let p_node_idx = context.isolated_nodes[idx]; + for t_node_idx in 0..context.target.nodes.len() { + if context.options.injective && state.used_target_nodes[t_node_idx] { + continue; + } + if !degree_feasible( + p_node_idx, + t_node_idx, + 0, + 0, + context.pattern_in, + context.pattern_out, + context.target_in, + context.target_out, + &state.pattern_mapped_in, + &state.pattern_mapped_out, + &state.target_mapped_in, + &state.target_mapped_out, + context.options.injective, + ) { + continue; + } + if !(context.node_eq)( + &context.pattern.nodes[p_node_idx], + &context.target.nodes[t_node_idx], + ) { + continue; + } + + state.node_map[p_node_idx] = Some(NodeId(t_node_idx)); + if context.options.injective { + state.used_target_nodes[t_node_idx] = true; + } + + backtrack_isolated_nodes(context, idx + 1, state, matches); + + if context.options.injective { + state.used_target_nodes[t_node_idx] = false; + } + state.node_map[p_node_idx] = None; + } +} + +#[allow(clippy::too_many_arguments)] +fn try_map_node( + context: &MatchContext<'_, OP, AP, O, A, FN>, + p_node_idx: usize, + t_node_idx: usize, + add_in: usize, + add_out: usize, + state: &mut MatchState, + newly_mapped: &mut Vec, +) -> bool +where + FN: Fn(&OP, &O) -> bool, +{ + if let Some(existing) = state.node_map[p_node_idx] { + if existing.0 != t_node_idx { + return false; + } + if context.options.injective { + return degree_feasible( + p_node_idx, + t_node_idx, + add_in, + add_out, + context.pattern_in, + context.pattern_out, + context.target_in, + context.target_out, + &state.pattern_mapped_in, + &state.pattern_mapped_out, + &state.target_mapped_in, + &state.target_mapped_out, + context.options.injective, + ); + } + return true; + } + if context.options.injective && state.used_target_nodes[t_node_idx] { + return false; + } + if !(context.node_eq)( + &context.pattern.nodes[p_node_idx], + &context.target.nodes[t_node_idx], + ) { + return false; + } + if !degree_feasible( + p_node_idx, + t_node_idx, + add_in, + add_out, + context.pattern_in, + context.pattern_out, + context.target_in, + context.target_out, + &state.pattern_mapped_in, + &state.pattern_mapped_out, + &state.target_mapped_in, + &state.target_mapped_out, + context.options.injective, + ) { + return false; } - let solutions = csp.solve_all(); - solutions - .into_iter() - .map(|solution| { - // Convert a CSP assignment into explicit node/edge maps. 
- let node_map = node_vars.iter().map(|&var| NodeId(solution[var])).collect(); - let edge_map = edge_vars.iter().map(|&var| EdgeId(solution[var])).collect(); - SubgraphIsomorphism { node_map, edge_map } - }) - .collect() + state.node_map[p_node_idx] = Some(NodeId(t_node_idx)); + if context.options.injective { + state.used_target_nodes[t_node_idx] = true; + } + newly_mapped.push(p_node_idx); + true } fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { @@ -266,18 +536,68 @@ fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { (in_deg, out_deg) } -fn degree_compatible( +fn apply_edge_incidence( + sources: &[NodeId], + targets: &[NodeId], + mapped_in: &mut [usize], + mapped_out: &mut [usize], + delta: i32, +) { + if delta >= 0 { + let add = delta as usize; + for node in sources { + mapped_out[node.0] += add; + } + for node in targets { + mapped_in[node.0] += add; + } + } else { + let sub = (-delta) as usize; + for node in sources { + mapped_out[node.0] -= sub; + } + for node in targets { + mapped_in[node.0] -= sub; + } + } +} + +fn degree_feasible( p_node_idx: usize, t_node_idx: usize, + add_in: usize, + add_out: usize, pattern_in: &[usize], pattern_out: &[usize], target_in: &[usize], target_out: &[usize], + pattern_mapped_in: &[usize], + pattern_mapped_out: &[usize], + target_mapped_in: &[usize], + target_mapped_out: &[usize], + injective: bool, ) -> bool { + if !injective { + return true; + } + // Basic degree bound: a pattern node cannot map to a target node with fewer in/out edges. if pattern_in[p_node_idx] > target_in[t_node_idx] || pattern_out[p_node_idx] > target_out[t_node_idx] { return false; } - true + + // Remaining incident edges on the pattern node after this tentative assignment. + let pattern_remaining_in = + pattern_in[p_node_idx].saturating_sub(pattern_mapped_in[p_node_idx] + add_in); + let pattern_remaining_out = + pattern_out[p_node_idx].saturating_sub(pattern_mapped_out[p_node_idx] + add_out); + // Remaining capacity on the target node to host those edges. + let target_remaining_in = + target_in[t_node_idx].saturating_sub(target_mapped_in[t_node_idx] + add_in); + let target_remaining_out = + target_out[t_node_idx].saturating_sub(target_mapped_out[t_node_idx] + add_out); + + // Feasible if the target has enough unused incident capacity to fit the pattern. 
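+ // In- and out-incidences are tracked separately, so both directions must fit independently.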
+ pattern_remaining_in <= target_remaining_in && pattern_remaining_out <= target_remaining_out } diff --git a/tests/lax/subgraph.rs b/tests/lax/subgraph.rs index ed0705b..deed093 100644 --- a/tests/lax/subgraph.rs +++ b/tests/lax/subgraph.rs @@ -265,3 +265,43 @@ fn test_subgraph_isomorphisms_isolated_nodes_duplicate_labels() { let matches = target.find_subgraph_isomorphisms(&pattern); assert_eq!(matches.len(), 6); } + +#[test] +fn test_subgraph_homomorphisms_allow_node_merging() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + target.new_edge('f', (vec![n0], vec![n0])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(0); + pattern.new_edge('f', (vec![p0], vec![p1])); + + let iso_matches = target.find_subgraph_isomorphisms(&pattern); + assert!(iso_matches.is_empty()); + + let homo_matches = target.find_subgraph_homomorphisms(&pattern); + assert_eq!(homo_matches.len(), 1); + assert_eq!(homo_matches[0].node_map()[0], n0); + assert_eq!(homo_matches[0].node_map()[1], n0); +} + +#[test] +fn test_subgraph_homomorphisms_allow_edge_merging() { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(0); + target.new_edge('h', (vec![n0], vec![n1])); + + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(0); + pattern.new_edge('h', (vec![p0], vec![p1])); + pattern.new_edge('h', (vec![p0], vec![p1])); + + let iso_matches = target.find_subgraph_isomorphisms(&pattern); + assert!(iso_matches.is_empty()); + + let homo_matches = target.find_subgraph_homomorphisms(&pattern); + assert_eq!(homo_matches.len(), 1); +} From f39467e3ae7de87d75e73c2b87ab78f35561967f Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 17:26:57 +0100 Subject: [PATCH 07/16] renaming --- src/lax/{subgraph.rs => matching.rs} | 0 src/lax/mod.rs | 2 +- tests/lax/{subgraph.rs => matching.rs} | 0 tests/lax/mod.rs | 2 +- 4 files changed, 2 insertions(+), 2 deletions(-) rename src/lax/{subgraph.rs => matching.rs} (100%) rename tests/lax/{subgraph.rs => matching.rs} (100%) diff --git a/src/lax/subgraph.rs b/src/lax/matching.rs similarity index 100% rename from src/lax/subgraph.rs rename to src/lax/matching.rs diff --git a/src/lax/mod.rs b/src/lax/mod.rs index 1dd8b9b..ba09ce6 100644 --- a/src/lax/mod.rs +++ b/src/lax/mod.rs @@ -74,7 +74,7 @@ pub mod category; pub mod functor; pub mod hypergraph; -pub mod subgraph; +pub mod matching; pub mod mut_category; pub mod open_hypergraph; diff --git a/tests/lax/subgraph.rs b/tests/lax/matching.rs similarity index 100% rename from tests/lax/subgraph.rs rename to tests/lax/matching.rs diff --git a/tests/lax/mod.rs b/tests/lax/mod.rs index 21e76f4..e85884d 100644 --- a/tests/lax/mod.rs +++ b/tests/lax/mod.rs @@ -1,3 +1,3 @@ pub mod eval; pub mod hypergraph; -pub mod subgraph; +pub mod matching; From 529e3fcf2128a8403eb978a7c2a8915f425b65f8 Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 18:11:33 +0100 Subject: [PATCH 08/16] rename --- src/lax/matching.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/lax/matching.rs b/src/lax/matching.rs index 3f58fcf..8c19fb0 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -1,12 +1,12 @@ use super::hypergraph::{EdgeId, Hypergraph, NodeId}; #[derive(Debug, Clone, PartialEq, Eq)] -pub struct SubgraphIsomorphism { +pub struct Morphism { node_map: Vec, edge_map: Vec, } -impl SubgraphIsomorphism { +impl Morphism { pub fn 
node_map(&self) -> &[NodeId] { &self.node_map } @@ -26,7 +26,7 @@ impl Hypergraph { pattern: &Hypergraph, node_eq: FN, edge_eq: FE, - ) -> Vec + ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, @@ -44,7 +44,7 @@ impl Hypergraph { pattern: &Hypergraph, node_eq: FN, edge_eq: FE, - ) -> Vec + ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, @@ -58,7 +58,7 @@ impl Hypergraph { pub fn find_subgraph_isomorphisms( &self, pattern: &Hypergraph, - ) -> Vec { + ) -> Vec { self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) } @@ -66,7 +66,7 @@ impl Hypergraph { pub fn find_subgraph_homomorphisms( &self, pattern: &Hypergraph, - ) -> Vec { + ) -> Vec { self.find_subgraph_homomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) } } @@ -76,7 +76,7 @@ fn find_subgraph_homomorphisms_impl( pattern: &Hypergraph, node_eq: &FN, edge_eq: &FE, -) -> Vec +) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, @@ -89,7 +89,7 @@ fn find_subgraph_isomorphisms_impl( pattern: &Hypergraph, node_eq: &FN, edge_eq: &FE, -) -> Vec +) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, @@ -103,7 +103,7 @@ fn find_subgraph_matches_impl( node_eq: &FN, edge_eq: &FE, injective: bool, -) -> Vec +) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, @@ -279,7 +279,7 @@ fn backtrack_edges( context: &MatchContext<'_, OP, AP, O, A, FN>, edge_index: usize, state: &mut MatchState, - matches: &mut Vec, + matches: &mut Vec, ) where FN: Fn(&OP, &O) -> bool, { @@ -389,7 +389,7 @@ fn backtrack_isolated_nodes( context: &MatchContext<'_, OP, AP, O, A, FN>, idx: usize, state: &mut MatchState, - matches: &mut Vec, + matches: &mut Vec, ) where FN: Fn(&OP, &O) -> bool, { @@ -404,7 +404,7 @@ fn backtrack_isolated_nodes( .iter() .map(|edge| edge.expect("pattern edges must be mapped")) .collect(); - matches.push(SubgraphIsomorphism { node_map, edge_map }); + matches.push(Morphism { node_map, edge_map }); return; } From db09506778b99e4d06f032cde9f4efe733cc95bf Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 18:29:47 +0100 Subject: [PATCH 09/16] simplify code --- src/lax/matching.rs | 151 ++++++++++++++++++++++++++------------------ 1 file changed, 90 insertions(+), 61 deletions(-) diff --git a/src/lax/matching.rs b/src/lax/matching.rs index 8c19fb0..976757b 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -273,6 +273,55 @@ impl MatchState { target_mapped_out: vec![0usize; target.nodes.len()], } } + + fn commit_edge_mapping( + &mut self, + p_edge_idx: usize, + t_edge_idx: usize, + p_sources: &[NodeId], + p_targets: &[NodeId], + t_sources: &[NodeId], + t_targets: &[NodeId], + options: &MatchOptions, + ) { + // Record the edge mapping and update incremental counters if injective. + self.edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); + if options.injective { + self.used_target_edges[t_edge_idx] = true; + add_edge_incidence(p_sources, p_targets, &mut self.pattern_mapped_in, &mut self.pattern_mapped_out); + add_edge_incidence(t_sources, t_targets, &mut self.target_mapped_in, &mut self.target_mapped_out); + } + } + + fn rollback_edge_mapping( + &mut self, + p_edge_idx: usize, + t_edge_idx: usize, + p_sources: &[NodeId], + p_targets: &[NodeId], + t_sources: &[NodeId], + t_targets: &[NodeId], + options: &MatchOptions, + ) { + // Undo the edge mapping and counters. 
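+ // Mirrors commit_edge_mapping so that commit and rollback stay symmetric.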
+ self.edge_map[p_edge_idx] = None; + if options.injective { + self.used_target_edges[t_edge_idx] = false; + remove_edge_incidence(p_sources, p_targets, &mut self.pattern_mapped_in, &mut self.pattern_mapped_out); + remove_edge_incidence(t_sources, t_targets, &mut self.target_mapped_in, &mut self.target_mapped_out); + } + } + + fn rollback_new_nodes(&mut self, newly_mapped: &mut Vec, options: &MatchOptions) { + // Undo node bindings created while exploring a candidate edge. + for p_node_idx in newly_mapped.drain(..) { + let t_node_idx = self.node_map[p_node_idx].unwrap().0; + self.node_map[p_node_idx] = None; + if options.injective { + self.used_target_nodes[t_node_idx] = false; + } + } + } } fn backtrack_edges( @@ -298,6 +347,7 @@ fn backtrack_edges( } let t_adj = &context.target.adjacency[t_edge_idx]; + // Track nodes that are newly bound by this edge so we can undo them on failure/return. let mut newly_mapped = Vec::new(); let mut ok = true; @@ -334,54 +384,31 @@ fn backtrack_edges( } if ok { - state.edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); - if context.options.injective { - state.used_target_edges[t_edge_idx] = true; - apply_edge_incidence( - &p_adj.sources, - &p_adj.targets, - &mut state.pattern_mapped_in, - &mut state.pattern_mapped_out, - 1, - ); - apply_edge_incidence( - &t_adj.sources, - &t_adj.targets, - &mut state.target_mapped_in, - &mut state.target_mapped_out, - 1, - ); - } + state.commit_edge_mapping( + p_edge_idx, + t_edge_idx, + &p_adj.sources, + &p_adj.targets, + &t_adj.sources, + &t_adj.targets, + context.options, + ); backtrack_edges(context, edge_index + 1, state, matches); - state.edge_map[p_edge_idx] = None; - if context.options.injective { - state.used_target_edges[t_edge_idx] = false; - apply_edge_incidence( - &p_adj.sources, - &p_adj.targets, - &mut state.pattern_mapped_in, - &mut state.pattern_mapped_out, - -1, - ); - apply_edge_incidence( - &t_adj.sources, - &t_adj.targets, - &mut state.target_mapped_in, - &mut state.target_mapped_out, - -1, - ); - } + state.rollback_edge_mapping( + p_edge_idx, + t_edge_idx, + &p_adj.sources, + &p_adj.targets, + &t_adj.sources, + &t_adj.targets, + context.options, + ); } - for p_node_idx in newly_mapped.drain(..) { - let t_node_idx = state.node_map[p_node_idx].unwrap().0; - state.node_map[p_node_idx] = None; - if context.options.injective { - state.used_target_nodes[t_node_idx] = false; - } - } + // Roll back any provisional node bindings from this edge attempt. 
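+ // Nodes bound by earlier edges are left untouched; only this edge's new bindings are undone.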
+ state.rollback_new_nodes(&mut newly_mapped, context.options); } } @@ -536,29 +563,31 @@ fn node_degrees(graph: &Hypergraph) -> (Vec, Vec) { (in_deg, out_deg) } -fn apply_edge_incidence( +fn add_edge_incidence( sources: &[NodeId], targets: &[NodeId], mapped_in: &mut [usize], mapped_out: &mut [usize], - delta: i32, ) { - if delta >= 0 { - let add = delta as usize; - for node in sources { - mapped_out[node.0] += add; - } - for node in targets { - mapped_in[node.0] += add; - } - } else { - let sub = (-delta) as usize; - for node in sources { - mapped_out[node.0] -= sub; - } - for node in targets { - mapped_in[node.0] -= sub; - } + for node in sources { + mapped_out[node.0] += 1; + } + for node in targets { + mapped_in[node.0] += 1; + } +} + +fn remove_edge_incidence( + sources: &[NodeId], + targets: &[NodeId], + mapped_in: &mut [usize], + mapped_out: &mut [usize], +) { + for node in sources { + mapped_out[node.0] -= 1; + } + for node in targets { + mapped_in[node.0] -= 1; } } From c5d88d67350639b1f7a012c97db65ad36c5c864a Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 18:40:44 +0100 Subject: [PATCH 10/16] simplify code --- src/lax/matching.rs | 153 +++++++++++++++++++++++--------------------- 1 file changed, 79 insertions(+), 74 deletions(-) diff --git a/src/lax/matching.rs b/src/lax/matching.rs index 976757b..efa03b5 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -55,18 +55,12 @@ impl Hypergraph { impl Hypergraph { /// Find all subgraph isomorphisms from `pattern` into `self` by label equality. - pub fn find_subgraph_isomorphisms( - &self, - pattern: &Hypergraph, - ) -> Vec { + pub fn find_subgraph_isomorphisms(&self, pattern: &Hypergraph) -> Vec { self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) } /// Find all subgraph homomorphisms from `pattern` into `self` by label equality. - pub fn find_subgraph_homomorphisms( - &self, - pattern: &Hypergraph, - ) -> Vec { + pub fn find_subgraph_homomorphisms(&self, pattern: &Hypergraph) -> Vec { self.find_subgraph_homomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) } } @@ -288,8 +282,18 @@ impl MatchState { self.edge_map[p_edge_idx] = Some(EdgeId(t_edge_idx)); if options.injective { self.used_target_edges[t_edge_idx] = true; - add_edge_incidence(p_sources, p_targets, &mut self.pattern_mapped_in, &mut self.pattern_mapped_out); - add_edge_incidence(t_sources, t_targets, &mut self.target_mapped_in, &mut self.target_mapped_out); + add_edge_incidence( + p_sources, + p_targets, + &mut self.pattern_mapped_in, + &mut self.pattern_mapped_out, + ); + add_edge_incidence( + t_sources, + t_targets, + &mut self.target_mapped_in, + &mut self.target_mapped_out, + ); } } @@ -307,14 +311,24 @@ impl MatchState { self.edge_map[p_edge_idx] = None; if options.injective { self.used_target_edges[t_edge_idx] = false; - remove_edge_incidence(p_sources, p_targets, &mut self.pattern_mapped_in, &mut self.pattern_mapped_out); - remove_edge_incidence(t_sources, t_targets, &mut self.target_mapped_in, &mut self.target_mapped_out); + remove_edge_incidence( + p_sources, + p_targets, + &mut self.pattern_mapped_in, + &mut self.pattern_mapped_out, + ); + remove_edge_incidence( + t_sources, + t_targets, + &mut self.target_mapped_in, + &mut self.target_mapped_out, + ); } } - fn rollback_new_nodes(&mut self, newly_mapped: &mut Vec, options: &MatchOptions) { + fn rollback_new_nodes(&mut self, newly_mapped: Vec, options: &MatchOptions) { // Undo node bindings created while exploring a candidate edge. 
- for p_node_idx in newly_mapped.drain(..) { + for p_node_idx in newly_mapped { let t_node_idx = self.node_map[p_node_idx].unwrap().0; self.node_map[p_node_idx] = None; if options.injective { @@ -322,6 +336,31 @@ impl MatchState { } } } + + fn commit_edge_nodes( + &mut self, + context: &MatchContext<'_, OP, AP, O, A, FN>, + p_adj: &super::hypergraph::Hyperedge, + t_adj: &super::hypergraph::Hyperedge, + ) -> Option> + where + FN: Fn(&OP, &O) -> bool, + { + let mut newly_mapped = Vec::new(); + for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { + if !try_map_node(context, p_node.0, t_node.0, 0, 1, self, &mut newly_mapped) { + self.rollback_new_nodes(newly_mapped, context.options); + return None; + } + } + for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { + if !try_map_node(context, p_node.0, t_node.0, 1, 0, self, &mut newly_mapped) { + self.rollback_new_nodes(newly_mapped, context.options); + return None; + } + } + Some(newly_mapped) + } } fn backtrack_edges( @@ -347,68 +386,34 @@ fn backtrack_edges( } let t_adj = &context.target.adjacency[t_edge_idx]; - // Track nodes that are newly bound by this edge so we can undo them on failure/return. - let mut newly_mapped = Vec::new(); - let mut ok = true; - - for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { - if !try_map_node( - context, - p_node.0, - t_node.0, - 0, - 1, - state, - &mut newly_mapped, - ) { - ok = false; - break; - } - } - - if ok { - for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { - if !try_map_node( - context, - p_node.0, - t_node.0, - 1, - 0, - state, - &mut newly_mapped, - ) { - ok = false; - break; - } - } - } - - if ok { - state.commit_edge_mapping( - p_edge_idx, - t_edge_idx, - &p_adj.sources, - &p_adj.targets, - &t_adj.sources, - &t_adj.targets, - context.options, - ); - - backtrack_edges(context, edge_index + 1, state, matches); - - state.rollback_edge_mapping( - p_edge_idx, - t_edge_idx, - &p_adj.sources, - &p_adj.targets, - &t_adj.sources, - &t_adj.targets, - context.options, - ); - } + let Some(newly_mapped) = state.commit_edge_nodes(context, p_adj, t_adj) else { + continue; + }; + + state.commit_edge_mapping( + p_edge_idx, + t_edge_idx, + &p_adj.sources, + &p_adj.targets, + &t_adj.sources, + &t_adj.targets, + context.options, + ); + + backtrack_edges(context, edge_index + 1, state, matches); + + state.rollback_edge_mapping( + p_edge_idx, + t_edge_idx, + &p_adj.sources, + &p_adj.targets, + &t_adj.sources, + &t_adj.targets, + context.options, + ); // Roll back any provisional node bindings from this edge attempt. 
- state.rollback_new_nodes(&mut newly_mapped, context.options); + state.rollback_new_nodes(newly_mapped, context.options); } } From 81d4e00175a53501653a118b5dd0b0ef4217f23e Mon Sep 17 00:00:00 2001 From: mstn Date: Mon, 2 Feb 2026 18:45:33 +0100 Subject: [PATCH 11/16] cleaning --- src/lax/matching.rs | 88 +++++++++++---------------------------------- 1 file changed, 21 insertions(+), 67 deletions(-) diff --git a/src/lax/matching.rs b/src/lax/matching.rs index efa03b5..bbf1684 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -445,21 +445,7 @@ fn backtrack_isolated_nodes( if context.options.injective && state.used_target_nodes[t_node_idx] { continue; } - if !degree_feasible( - p_node_idx, - t_node_idx, - 0, - 0, - context.pattern_in, - context.pattern_out, - context.target_in, - context.target_out, - &state.pattern_mapped_in, - &state.pattern_mapped_out, - &state.target_mapped_in, - &state.target_mapped_out, - context.options.injective, - ) { + if !degree_feasible(context, state, p_node_idx, t_node_idx, 0, 0) { continue; } if !(context.node_eq)( @@ -501,21 +487,7 @@ where return false; } if context.options.injective { - return degree_feasible( - p_node_idx, - t_node_idx, - add_in, - add_out, - context.pattern_in, - context.pattern_out, - context.target_in, - context.target_out, - &state.pattern_mapped_in, - &state.pattern_mapped_out, - &state.target_mapped_in, - &state.target_mapped_out, - context.options.injective, - ); + return degree_feasible(context, state, p_node_idx, t_node_idx, add_in, add_out); } return true; } @@ -528,21 +500,7 @@ where ) { return false; } - if !degree_feasible( - p_node_idx, - t_node_idx, - add_in, - add_out, - context.pattern_in, - context.pattern_out, - context.target_in, - context.target_out, - &state.pattern_mapped_in, - &state.pattern_mapped_out, - &state.target_mapped_in, - &state.target_mapped_out, - context.options.injective, - ) { + if !degree_feasible(context, state, p_node_idx, t_node_idx, add_in, add_out) { return false; } @@ -596,41 +554,37 @@ fn remove_edge_incidence( } } -fn degree_feasible( +fn degree_feasible( + context: &MatchContext<'_, OP, AP, O, A, FN>, + state: &MatchState, p_node_idx: usize, t_node_idx: usize, add_in: usize, add_out: usize, - pattern_in: &[usize], - pattern_out: &[usize], - target_in: &[usize], - target_out: &[usize], - pattern_mapped_in: &[usize], - pattern_mapped_out: &[usize], - target_mapped_in: &[usize], - target_mapped_out: &[usize], - injective: bool, -) -> bool { - if !injective { +) -> bool +where + FN: Fn(&OP, &O) -> bool, +{ + if !context.options.injective { return true; } // Basic degree bound: a pattern node cannot map to a target node with fewer in/out edges. - if pattern_in[p_node_idx] > target_in[t_node_idx] - || pattern_out[p_node_idx] > target_out[t_node_idx] + if context.pattern_in[p_node_idx] > context.target_in[t_node_idx] + || context.pattern_out[p_node_idx] > context.target_out[t_node_idx] { return false; } // Remaining incident edges on the pattern node after this tentative assignment. 
- let pattern_remaining_in = - pattern_in[p_node_idx].saturating_sub(pattern_mapped_in[p_node_idx] + add_in); - let pattern_remaining_out = - pattern_out[p_node_idx].saturating_sub(pattern_mapped_out[p_node_idx] + add_out); + let pattern_remaining_in = context.pattern_in[p_node_idx] + .saturating_sub(state.pattern_mapped_in[p_node_idx] + add_in); + let pattern_remaining_out = context.pattern_out[p_node_idx] + .saturating_sub(state.pattern_mapped_out[p_node_idx] + add_out); // Remaining capacity on the target node to host those edges. - let target_remaining_in = - target_in[t_node_idx].saturating_sub(target_mapped_in[t_node_idx] + add_in); - let target_remaining_out = - target_out[t_node_idx].saturating_sub(target_mapped_out[t_node_idx] + add_out); + let target_remaining_in = context.target_in[t_node_idx] + .saturating_sub(state.target_mapped_in[t_node_idx] + add_in); + let target_remaining_out = context.target_out[t_node_idx] + .saturating_sub(state.target_mapped_out[t_node_idx] + add_out); // Feasible if the target has enough unused incident capacity to fit the pattern. pattern_remaining_in <= target_remaining_in && pattern_remaining_out <= target_remaining_out From 60bbdc5a6ff2a5a7bee1f9c2c425ab0f40e90ba2 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 3 Feb 2026 10:57:41 +0100 Subject: [PATCH 12/16] add trace --- src/lax/matching.rs | 213 +++++++++++++++++++++++++++++++++++++++--- tests/lax/matching.rs | 34 +++---- 2 files changed, 215 insertions(+), 32 deletions(-) diff --git a/src/lax/matching.rs b/src/lax/matching.rs index bbf1684..f983817 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -16,6 +16,49 @@ impl Morphism { } } +pub trait MatchTrace { + fn on_event(&self, _event: MatchEvent) {} +} + +pub struct NoopTrace; + +impl MatchTrace for NoopTrace {} + +static NOOP_TRACE: NoopTrace = NoopTrace; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum MatchEvent { + EnterFrame { + depth: usize, + frame_id: usize, + }, + Decision { + pick_edge: Option, + pick_node: Option, + choice_features: &'static str, + candidate_count: usize, + heuristic_tag: &'static str, + depth: usize, + }, + Branch { + include_edge: Option, + include_node: Option, + depth: usize, + }, + PropagationSummary { + depth: usize, + }, + Prune { + reason: &'static str, + depth: usize, + }, + Solution, + ExitFrame { + depth: usize, + frame_id: usize, + }, +} + impl Hypergraph { /// Find all subgraph isomorphisms from `pattern` into `self`. /// @@ -26,12 +69,14 @@ impl Hypergraph { pattern: &Hypergraph, node_eq: FN, edge_eq: FE, + trace: Option<&dyn MatchTrace>, ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - find_subgraph_isomorphisms_impl(self, pattern, &node_eq, &edge_eq) + let trace = trace.unwrap_or(&NOOP_TRACE); + find_subgraph_isomorphisms_impl(self, pattern, &node_eq, &edge_eq, trace) } /// Find all subgraph homomorphisms from `pattern` into `self`. @@ -44,24 +89,34 @@ impl Hypergraph { pattern: &Hypergraph, node_eq: FN, edge_eq: FE, + trace: Option<&dyn MatchTrace>, ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - find_subgraph_homomorphisms_impl(self, pattern, &node_eq, &edge_eq) + let trace = trace.unwrap_or(&NOOP_TRACE); + find_subgraph_homomorphisms_impl(self, pattern, &node_eq, &edge_eq, trace) } } impl Hypergraph { /// Find all subgraph isomorphisms from `pattern` into `self` by label equality. 
- pub fn find_subgraph_isomorphisms(&self, pattern: &Hypergraph) -> Vec { - self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) + pub fn find_subgraph_isomorphisms( + &self, + pattern: &Hypergraph, + trace: Option<&dyn MatchTrace>, + ) -> Vec { + self.find_subgraph_isomorphisms_by(pattern, |a, b| a == b, |a, b| a == b, trace) } /// Find all subgraph homomorphisms from `pattern` into `self` by label equality. - pub fn find_subgraph_homomorphisms(&self, pattern: &Hypergraph) -> Vec { - self.find_subgraph_homomorphisms_by(pattern, |a, b| a == b, |a, b| a == b) + pub fn find_subgraph_homomorphisms( + &self, + pattern: &Hypergraph, + trace: Option<&dyn MatchTrace>, + ) -> Vec { + self.find_subgraph_homomorphisms_by(pattern, |a, b| a == b, |a, b| a == b, trace) } } @@ -70,12 +125,13 @@ fn find_subgraph_homomorphisms_impl( pattern: &Hypergraph, node_eq: &FN, edge_eq: &FE, + trace: &dyn MatchTrace, ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, false) + find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, false, trace) } fn find_subgraph_isomorphisms_impl( @@ -83,12 +139,13 @@ fn find_subgraph_isomorphisms_impl( pattern: &Hypergraph, node_eq: &FN, edge_eq: &FE, + trace: &dyn MatchTrace, ) -> Vec where FN: Fn(&OP, &O) -> bool, FE: Fn(&AP, &A) -> bool, { - find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, true) + find_subgraph_matches_impl(target, pattern, node_eq, edge_eq, true, trace) } fn find_subgraph_matches_impl( @@ -97,6 +154,7 @@ fn find_subgraph_matches_impl( node_eq: &FN, edge_eq: &FE, injective: bool, + trace: &dyn MatchTrace, ) -> Vec where FN: Fn(&OP, &O) -> bool, @@ -170,6 +228,7 @@ where &target_in, &target_out, &options, + trace, ); let mut matches = Vec::new(); @@ -208,6 +267,7 @@ where target_in: &'a [usize], target_out: &'a [usize], options: &'a MatchOptions, + trace: &'a dyn MatchTrace, } impl<'a, OP, AP, O, A, FN> MatchContext<'a, OP, AP, O, A, FN> @@ -226,6 +286,7 @@ where target_in: &'a [usize], target_out: &'a [usize], options: &'a MatchOptions, + trace: &'a dyn MatchTrace, ) -> Self { Self { target, @@ -239,6 +300,7 @@ where target_in, target_out, options, + trace, } } } @@ -252,6 +314,7 @@ struct MatchState { pattern_mapped_out: Vec, target_mapped_in: Vec, target_mapped_out: Vec, + next_frame_id: usize, } impl MatchState { @@ -265,9 +328,21 @@ impl MatchState { pattern_mapped_out: vec![0usize; pattern.nodes.len()], target_mapped_in: vec![0usize; target.nodes.len()], target_mapped_out: vec![0usize; target.nodes.len()], + next_frame_id: 0, } } + fn enter_frame(&mut self, trace: &dyn MatchTrace, depth: usize) -> usize { + let frame_id = self.next_frame_id; + self.next_frame_id += 1; + trace.on_event(MatchEvent::EnterFrame { depth, frame_id }); + frame_id + } + + fn exit_frame(&self, trace: &dyn MatchTrace, depth: usize, frame_id: usize) { + trace.on_event(MatchEvent::ExitFrame { depth, frame_id }); + } + fn commit_edge_mapping( &mut self, p_edge_idx: usize, @@ -342,19 +417,38 @@ impl MatchState { context: &MatchContext<'_, OP, AP, O, A, FN>, p_adj: &super::hypergraph::Hyperedge, t_adj: &super::hypergraph::Hyperedge, + depth: usize, ) -> Option> where FN: Fn(&OP, &O) -> bool, { let mut newly_mapped = Vec::new(); for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { - if !try_map_node(context, p_node.0, t_node.0, 0, 1, self, &mut newly_mapped) { + if !try_map_node( + context, + p_node.0, + t_node.0, + 0, + 1, + self, + &mut 
newly_mapped, + depth, + ) { self.rollback_new_nodes(newly_mapped, context.options); return None; } } for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { - if !try_map_node(context, p_node.0, t_node.0, 1, 0, self, &mut newly_mapped) { + if !try_map_node( + context, + p_node.0, + t_node.0, + 1, + 0, + self, + &mut newly_mapped, + depth, + ) { self.rollback_new_nodes(newly_mapped, context.options); return None; } @@ -371,22 +465,45 @@ fn backtrack_edges( ) where FN: Fn(&OP, &O) -> bool, { + let frame_id = state.enter_frame(context.trace, edge_index); // If all edges are mapped, fill in remaining isolated nodes. if edge_index == context.edge_order.len() { backtrack_isolated_nodes(context, 0, state, matches); + state.exit_frame(context.trace, edge_index, frame_id); return; } let p_edge_idx = context.edge_order[edge_index]; let p_adj = &context.pattern.adjacency[p_edge_idx]; + context.trace.on_event(MatchEvent::Decision { + pick_edge: Some(p_edge_idx), + pick_node: None, + choice_features: "edge_order", + candidate_count: context.edge_candidates[p_edge_idx].len(), + heuristic_tag: "min_candidates_then_arity", + depth: edge_index, + }); for &t_edge_idx in &context.edge_candidates[p_edge_idx] { + context.trace.on_event(MatchEvent::Branch { + include_edge: Some(t_edge_idx), + include_node: None, + depth: edge_index, + }); if context.options.injective && state.used_target_edges[t_edge_idx] { + context.trace.on_event(MatchEvent::Prune { + reason: "edge_used", + depth: edge_index, + }); continue; } let t_adj = &context.target.adjacency[t_edge_idx]; - let Some(newly_mapped) = state.commit_edge_nodes(context, p_adj, t_adj) else { + let Some(newly_mapped) = state.commit_edge_nodes(context, p_adj, t_adj, edge_index) else { + context.trace.on_event(MatchEvent::Prune { + reason: "node_mapping_failed", + depth: edge_index, + }); continue; }; @@ -399,6 +516,9 @@ fn backtrack_edges( &t_adj.targets, context.options, ); + context + .trace + .on_event(MatchEvent::PropagationSummary { depth: edge_index }); backtrack_edges(context, edge_index + 1, state, matches); @@ -415,6 +535,7 @@ fn backtrack_edges( // Roll back any provisional node bindings from this edge attempt. 
state.rollback_new_nodes(newly_mapped, context.options); } + state.exit_frame(context.trace, edge_index, frame_id); } fn backtrack_isolated_nodes( @@ -425,6 +546,7 @@ fn backtrack_isolated_nodes( ) where FN: Fn(&OP, &O) -> bool, { + let frame_id = state.enter_frame(context.trace, idx); if idx == context.isolated_nodes.len() { let node_map = state .node_map @@ -437,21 +559,43 @@ fn backtrack_isolated_nodes( .map(|edge| edge.expect("pattern edges must be mapped")) .collect(); matches.push(Morphism { node_map, edge_map }); + context.trace.on_event(MatchEvent::Solution); + state.exit_frame(context.trace, idx, frame_id); return; } let p_node_idx = context.isolated_nodes[idx]; + context.trace.on_event(MatchEvent::Decision { + pick_edge: None, + pick_node: Some(p_node_idx), + choice_features: "isolated_nodes", + candidate_count: context.target.nodes.len(), + heuristic_tag: "isolated_nodes_order", + depth: idx, + }); for t_node_idx in 0..context.target.nodes.len() { if context.options.injective && state.used_target_nodes[t_node_idx] { + context.trace.on_event(MatchEvent::Prune { + reason: "node_used", + depth: idx, + }); continue; } if !degree_feasible(context, state, p_node_idx, t_node_idx, 0, 0) { + context.trace.on_event(MatchEvent::Prune { + reason: "degree_infeasible", + depth: idx, + }); continue; } if !(context.node_eq)( &context.pattern.nodes[p_node_idx], &context.target.nodes[t_node_idx], ) { + context.trace.on_event(MatchEvent::Prune { + reason: "label_mismatch", + depth: idx, + }); continue; } @@ -459,6 +603,14 @@ fn backtrack_isolated_nodes( if context.options.injective { state.used_target_nodes[t_node_idx] = true; } + context.trace.on_event(MatchEvent::Branch { + include_edge: None, + include_node: Some(t_node_idx), + depth: idx, + }); + context + .trace + .on_event(MatchEvent::PropagationSummary { depth: idx }); backtrack_isolated_nodes(context, idx + 1, state, matches); @@ -466,7 +618,13 @@ fn backtrack_isolated_nodes( state.used_target_nodes[t_node_idx] = false; } state.node_map[p_node_idx] = None; + context.trace.on_event(MatchEvent::Branch { + include_edge: None, + include_node: Some(t_node_idx), + depth: idx, + }); } + state.exit_frame(context.trace, idx, frame_id); } #[allow(clippy::too_many_arguments)] @@ -478,12 +636,17 @@ fn try_map_node( add_out: usize, state: &mut MatchState, newly_mapped: &mut Vec, + depth: usize, ) -> bool where FN: Fn(&OP, &O) -> bool, { if let Some(existing) = state.node_map[p_node_idx] { if existing.0 != t_node_idx { + context.trace.on_event(MatchEvent::Prune { + reason: "node_mapped_conflict", + depth, + }); return false; } if context.options.injective { @@ -492,15 +655,27 @@ where return true; } if context.options.injective && state.used_target_nodes[t_node_idx] { + context.trace.on_event(MatchEvent::Prune { + reason: "node_used", + depth, + }); return false; } if !(context.node_eq)( &context.pattern.nodes[p_node_idx], &context.target.nodes[t_node_idx], ) { + context.trace.on_event(MatchEvent::Prune { + reason: "label_mismatch", + depth, + }); return false; } if !degree_feasible(context, state, p_node_idx, t_node_idx, add_in, add_out) { + context.trace.on_event(MatchEvent::Prune { + reason: "degree_infeasible", + depth, + }); return false; } @@ -509,6 +684,14 @@ where state.used_target_nodes[t_node_idx] = true; } newly_mapped.push(p_node_idx); + context.trace.on_event(MatchEvent::Branch { + include_edge: None, + include_node: Some(t_node_idx), + depth, + }); + context + .trace + .on_event(MatchEvent::PropagationSummary { depth }); true } @@ -576,13 
+759,13 @@ where } // Remaining incident edges on the pattern node after this tentative assignment. - let pattern_remaining_in = context.pattern_in[p_node_idx] - .saturating_sub(state.pattern_mapped_in[p_node_idx] + add_in); + let pattern_remaining_in = + context.pattern_in[p_node_idx].saturating_sub(state.pattern_mapped_in[p_node_idx] + add_in); let pattern_remaining_out = context.pattern_out[p_node_idx] .saturating_sub(state.pattern_mapped_out[p_node_idx] + add_out); // Remaining capacity on the target node to host those edges. - let target_remaining_in = context.target_in[t_node_idx] - .saturating_sub(state.target_mapped_in[t_node_idx] + add_in); + let target_remaining_in = + context.target_in[t_node_idx].saturating_sub(state.target_mapped_in[t_node_idx] + add_in); let target_remaining_out = context.target_out[t_node_idx] .saturating_sub(state.target_mapped_out[t_node_idx] + add_out); diff --git a/tests/lax/matching.rs b/tests/lax/matching.rs index deed093..6004cf9 100644 --- a/tests/lax/matching.rs +++ b/tests/lax/matching.rs @@ -14,7 +14,7 @@ fn test_subgraph_isomorphisms_single_edge() { let p1 = pattern.new_node(1); pattern.new_edge('f', (vec![p0], vec![p1])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 2); assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); @@ -46,7 +46,7 @@ fn test_subgraph_isomorphisms_order_sensitive() { let p1 = pattern.new_node(1); pattern.new_edge('f', (vec![p1, p0], vec![])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert!(matches.is_empty()); } @@ -61,7 +61,7 @@ fn test_subgraph_isomorphisms_isolated_nodes() { pattern.new_node(1); pattern.new_node(2); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); // The pattern's 2-label must map to the unique 2 in the target; the 1-label can map to either 1. 
assert_eq!(matches.len(), 2); assert!(matches.iter().all(|m| m.node_map()[1] == NodeId(1))); @@ -90,7 +90,7 @@ fn test_subgraph_isomorphisms_shared_nodes() { pattern.new_edge('g', (vec![p0], vec![p1])); pattern.new_edge('h', (vec![p1], vec![p2])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); } @@ -107,7 +107,7 @@ fn test_subgraph_isomorphisms_arity_mismatch() { let p2 = pattern.new_node(2); pattern.new_edge('f', (vec![p0, p1], vec![p2])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert!(matches.is_empty()); } @@ -125,7 +125,7 @@ fn test_subgraph_isomorphisms_degree_feasible_prune() { pattern.new_edge('a', (vec![p0], vec![p1])); pattern.new_edge('b', (vec![p0], vec![p2])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert!(matches.is_empty()); } @@ -136,7 +136,7 @@ fn test_subgraph_isomorphisms_empty_pattern() { target.new_node(2); let pattern: Hypergraph = Hypergraph::empty(); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); assert!(matches[0].node_map().is_empty()); assert!(matches[0].edge_map().is_empty()); @@ -154,7 +154,7 @@ fn test_subgraph_isomorphisms_multi_incidence_sources() { let p1 = pattern.new_node(1); pattern.new_edge('f', (vec![p0, p0], vec![p1])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); assert_eq!(matches[0].node_map()[0], n0); assert_eq!(matches[0].node_map()[1], n1); @@ -183,7 +183,7 @@ fn test_subgraph_isomorphisms_multiple_matches_complex_target() { pattern.new_edge('f', (vec![p0], vec![p1])); pattern.new_edge('g', (vec![p1], vec![p2])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 4); assert!(matches.iter().all(|m| m.node_map()[2] == n4)); } @@ -198,7 +198,7 @@ fn test_subgraph_isomorphisms_node_in_sources_and_targets() { let p0 = pattern.new_node(0); pattern.new_edge('g', (vec![p0], vec![p0])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); assert_eq!(matches[0].node_map()[0], n0); assert_eq!(matches[0].edge_map()[0], EdgeId(0)); @@ -217,7 +217,7 @@ fn test_subgraph_isomorphisms_identical_edges_injective() { let p1 = pattern.new_node(1); pattern.new_edge('h', (vec![p0], vec![p1])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 2); let mut edge_ids = matches .iter() @@ -241,7 +241,7 @@ fn test_subgraph_isomorphisms_two_identical_edges_bijective() { pattern.new_edge('h', (vec![p0], vec![p1])); pattern.new_edge('h', (vec![p0], vec![p1])); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 2); let mut edge_maps = matches .iter() @@ -262,7 +262,7 @@ fn test_subgraph_isomorphisms_isolated_nodes_duplicate_labels() { pattern.new_node(1); pattern.new_node(1); - let matches = target.find_subgraph_isomorphisms(&pattern); + let matches = 
target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 6); } @@ -277,10 +277,10 @@ fn test_subgraph_homomorphisms_allow_node_merging() { let p1 = pattern.new_node(0); pattern.new_edge('f', (vec![p0], vec![p1])); - let iso_matches = target.find_subgraph_isomorphisms(&pattern); + let iso_matches = target.find_subgraph_isomorphisms(&pattern, None); assert!(iso_matches.is_empty()); - let homo_matches = target.find_subgraph_homomorphisms(&pattern); + let homo_matches = target.find_subgraph_homomorphisms(&pattern, None); assert_eq!(homo_matches.len(), 1); assert_eq!(homo_matches[0].node_map()[0], n0); assert_eq!(homo_matches[0].node_map()[1], n0); @@ -299,9 +299,9 @@ fn test_subgraph_homomorphisms_allow_edge_merging() { pattern.new_edge('h', (vec![p0], vec![p1])); pattern.new_edge('h', (vec![p0], vec![p1])); - let iso_matches = target.find_subgraph_isomorphisms(&pattern); + let iso_matches = target.find_subgraph_isomorphisms(&pattern, None); assert!(iso_matches.is_empty()); - let homo_matches = target.find_subgraph_homomorphisms(&pattern); + let homo_matches = target.find_subgraph_homomorphisms(&pattern, None); assert_eq!(homo_matches.len(), 1); } From 5d58abb34c08162c8641d02d5ba43caa9313718b Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 3 Feb 2026 13:03:07 +0100 Subject: [PATCH 13/16] add example --- examples/matching_trace.rs | 232 +++++++++++++++++++++++++++++++++++++ src/lax/matching.rs | 58 +++++++--- 2 files changed, 274 insertions(+), 16 deletions(-) create mode 100644 examples/matching_trace.rs diff --git a/examples/matching_trace.rs b/examples/matching_trace.rs new file mode 100644 index 0000000..9ad64a8 --- /dev/null +++ b/examples/matching_trace.rs @@ -0,0 +1,232 @@ +use open_hypergraphs::lax::matching::{MatchEvent, MatchTrace}; +use open_hypergraphs::lax::Hypergraph; +use std::cell::{Cell, RefCell}; +use std::collections::BTreeMap; + +struct PrintTrace { + indent: Cell, + edge_decision: RefCell>, + node_decision: RefCell>>, + edge_assign: RefCell>, + node_assign: RefCell>, + frame_edges: RefCell>>, + frame_nodes: RefCell>>, + frame_stack: RefCell>, +} + +impl PrintTrace { + fn new() -> Self { + Self { + indent: Cell::new(0), + edge_decision: RefCell::new(BTreeMap::new()), + node_decision: RefCell::new(BTreeMap::new()), + edge_assign: RefCell::new(BTreeMap::new()), + node_assign: RefCell::new(BTreeMap::new()), + frame_edges: RefCell::new(BTreeMap::new()), + frame_nodes: RefCell::new(BTreeMap::new()), + frame_stack: RefCell::new(Vec::new()), + } + } + + fn indent(&self) -> usize { + self.indent.get() * 2 + } + + fn print_solution(&self) { + let edge_assign = self.edge_assign.borrow(); + let node_assign = self.node_assign.borrow(); + let mut parts = Vec::new(); + for (p_edge, t_edge) in edge_assign.iter() { + parts.push(format!("e{p_edge}->t{t_edge}")); + } + for (p_node, t_node) in node_assign.iter() { + parts.push(format!("n{p_node}->t{t_node}")); + } + + if parts.is_empty() { + println!("{:indent$}solution", "", indent = self.indent()); + } else { + println!( + "{:indent$}solution [{}]", + "", + parts.join(", "), + indent = self.indent() + ); + } + } +} + +impl MatchTrace for PrintTrace { + fn on_event(&self, event: MatchEvent) { + match event { + MatchEvent::EnterFrame { depth: _, frame_id } => { + println!( + "{:indent$}enter frame #{frame_id}", + "", + indent = self.indent() + ); + self.indent.set(self.indent.get() + 1); + self.frame_stack.borrow_mut().push(frame_id); + } + MatchEvent::ExitFrame { depth, frame_id } => { + let indent = 
self.indent.get().saturating_sub(1); + self.indent.set(indent); + println!( + "{:indent$}exit frame #{frame_id}", + "", + indent = self.indent() + ); + if let Some(top) = self.frame_stack.borrow_mut().pop() { + self.edge_decision.borrow_mut().remove(&top); + self.node_decision.borrow_mut().remove(&top); + if let Some(edges) = self.frame_edges.borrow_mut().remove(&top) { + let mut edge_assign = self.edge_assign.borrow_mut(); + for p_edge in edges { + edge_assign.remove(&p_edge); + } + } + if let Some(nodes) = self.frame_nodes.borrow_mut().remove(&top) { + let mut node_assign = self.node_assign.borrow_mut(); + for p_node in nodes { + node_assign.remove(&p_node); + } + } + } + } + MatchEvent::Decision { + pattern_edge, + pattern_node, + choice_features, + candidate_count, + heuristic_tag, + depth, + } => { + let current = self.frame_stack.borrow().last().copied(); + if let Some(frame_id) = current { + if let Some(edge) = pattern_edge { + self.edge_decision.borrow_mut().insert(frame_id, edge); + } + if let Some(node) = pattern_node { + self.node_decision + .borrow_mut() + .entry(frame_id) + .or_default() + .push(node); + } + } + println!( + "{:indent$}decision pattern_edge={pattern_edge:?} pattern_node={pattern_node:?} candidates={candidate_count} features={choice_features} heuristic={heuristic_tag}", + "", + indent = self.indent() + ); + } + MatchEvent::Branch { + target_edge, + target_node, + depth, + } => { + let current = self.frame_stack.borrow().last().copied(); + if let Some(frame_id) = current { + if let Some(edge) = target_edge { + if let Some(p_edge) = self.edge_decision.borrow().get(&frame_id) { + self.edge_assign.borrow_mut().insert(*p_edge, edge); + self.frame_edges + .borrow_mut() + .entry(frame_id) + .or_default() + .push(*p_edge); + } + } + if let Some(node) = target_node { + let mut pending = self.node_decision.borrow_mut(); + if let Some(stack) = pending.get_mut(&frame_id) { + if let Some(p_node) = stack.pop() { + self.node_assign.borrow_mut().insert(p_node, node); + self.frame_nodes + .borrow_mut() + .entry(frame_id) + .or_default() + .push(p_node); + } + } + } + } + let frame_id = self.frame_stack.borrow().last().copied(); + let edge_map = target_edge.and_then(|edge| { + let frame_id = frame_id?; + let edge_decision = self.edge_decision.borrow(); + let p_edge = edge_decision.get(&frame_id)?; + Some(format!("map e{p_edge}->t{edge}")) + }); + let node_map = target_node.and_then(|node| { + let frame_id = frame_id?; + let node_decision = self.node_decision.borrow(); + let p_node = node_decision.get(&frame_id)?.last().copied()?; + Some(format!("map n{p_node}->t{node}")) + }); + let map_note = match (edge_map, node_map) { + (Some(edge), Some(node)) => format!("{edge}, {node}"), + (Some(edge), None) => edge, + (None, Some(node)) => node, + (None, None) => String::new(), + }; + println!( + "{:indent$}branch target_edge={target_edge:?} target_node={target_node:?} {map_note}", + "", + indent = self.indent() + ); + } + MatchEvent::PropagationSummary { depth: _ } => { + println!("{:indent$}propagation done", "", indent = self.indent()); + } + MatchEvent::Prune { + reason, + detail, + depth: _, + } => { + println!( + "{:indent$}prune: {reason} ({detail})", + "", + indent = self.indent() + ); + } + MatchEvent::Solution => { + self.print_solution(); + } + } + } +} + +fn build_target() -> Hypergraph { + let mut target = Hypergraph::empty(); + let n0 = target.new_node(0); + let n1 = target.new_node(0); + target.new_edge('f', (vec![n0], vec![n1])); + target.new_edge('f', (vec![n1], 
vec![n0])); + target.new_edge('f', (vec![n0], vec![n0])); + target.new_edge('f', (vec![n1], vec![n1])); + target +} + +fn build_pattern() -> Hypergraph { + let mut pattern = Hypergraph::empty(); + let p0 = pattern.new_node(0); + let p1 = pattern.new_node(0); + pattern.new_edge('f', (vec![p0], vec![p1])); + pattern +} + +fn main() { + let target = build_target(); + let pattern = build_pattern(); + let trace = PrintTrace::new(); + + println!("=== isomorphisms ==="); + let iso = target.find_subgraph_isomorphisms(&pattern, Some(&trace)); + println!("isomorphisms: {}", iso.len()); + + println!(); + println!("=== homomorphisms ==="); + let homo = target.find_subgraph_homomorphisms(&pattern, Some(&trace)); + println!("homomorphisms: {}", homo.len()); +} diff --git a/src/lax/matching.rs b/src/lax/matching.rs index f983817..962534b 100644 --- a/src/lax/matching.rs +++ b/src/lax/matching.rs @@ -33,16 +33,16 @@ pub enum MatchEvent { frame_id: usize, }, Decision { - pick_edge: Option, - pick_node: Option, + pattern_edge: Option, + pattern_node: Option, choice_features: &'static str, candidate_count: usize, heuristic_tag: &'static str, depth: usize, }, Branch { - include_edge: Option, - include_node: Option, + target_edge: Option, + target_node: Option, depth: usize, }, PropagationSummary { @@ -50,6 +50,7 @@ pub enum MatchEvent { }, Prune { reason: &'static str, + detail: &'static str, depth: usize, }, Solution, @@ -468,6 +469,14 @@ fn backtrack_edges( let frame_id = state.enter_frame(context.trace, edge_index); // If all edges are mapped, fill in remaining isolated nodes. if edge_index == context.edge_order.len() { + context.trace.on_event(MatchEvent::Decision { + pattern_edge: None, + pattern_node: None, + choice_features: "no_more_pattern_edges", + candidate_count: 0, + heuristic_tag: "edge_order", + depth: edge_index, + }); backtrack_isolated_nodes(context, 0, state, matches); state.exit_frame(context.trace, edge_index, frame_id); return; @@ -476,8 +485,8 @@ fn backtrack_edges( let p_edge_idx = context.edge_order[edge_index]; let p_adj = &context.pattern.adjacency[p_edge_idx]; context.trace.on_event(MatchEvent::Decision { - pick_edge: Some(p_edge_idx), - pick_node: None, + pattern_edge: Some(p_edge_idx), + pattern_node: None, choice_features: "edge_order", candidate_count: context.edge_candidates[p_edge_idx].len(), heuristic_tag: "min_candidates_then_arity", @@ -486,13 +495,14 @@ fn backtrack_edges( for &t_edge_idx in &context.edge_candidates[p_edge_idx] { context.trace.on_event(MatchEvent::Branch { - include_edge: Some(t_edge_idx), - include_node: None, + target_edge: Some(t_edge_idx), + target_node: None, depth: edge_index, }); if context.options.injective && state.used_target_edges[t_edge_idx] { context.trace.on_event(MatchEvent::Prune { reason: "edge_used", + detail: "injective_edge_reuse", depth: edge_index, }); continue; @@ -502,6 +512,7 @@ fn backtrack_edges( let Some(newly_mapped) = state.commit_edge_nodes(context, p_adj, t_adj, edge_index) else { context.trace.on_event(MatchEvent::Prune { reason: "node_mapping_failed", + detail: "edge_incidence_conflict", depth: edge_index, }); continue; @@ -566,8 +577,8 @@ fn backtrack_isolated_nodes( let p_node_idx = context.isolated_nodes[idx]; context.trace.on_event(MatchEvent::Decision { - pick_edge: None, - pick_node: Some(p_node_idx), + pattern_edge: None, + pattern_node: Some(p_node_idx), choice_features: "isolated_nodes", candidate_count: context.target.nodes.len(), heuristic_tag: "isolated_nodes_order", @@ -577,6 +588,7 @@ fn 
backtrack_isolated_nodes( if context.options.injective && state.used_target_nodes[t_node_idx] { context.trace.on_event(MatchEvent::Prune { reason: "node_used", + detail: "injective_node_reuse", depth: idx, }); continue; @@ -584,6 +596,7 @@ fn backtrack_isolated_nodes( if !degree_feasible(context, state, p_node_idx, t_node_idx, 0, 0) { context.trace.on_event(MatchEvent::Prune { reason: "degree_infeasible", + detail: "degree_capacity", depth: idx, }); continue; @@ -594,6 +607,7 @@ fn backtrack_isolated_nodes( ) { context.trace.on_event(MatchEvent::Prune { reason: "label_mismatch", + detail: "node_label", depth: idx, }); continue; @@ -604,8 +618,8 @@ fn backtrack_isolated_nodes( state.used_target_nodes[t_node_idx] = true; } context.trace.on_event(MatchEvent::Branch { - include_edge: None, - include_node: Some(t_node_idx), + target_edge: None, + target_node: Some(t_node_idx), depth: idx, }); context @@ -619,8 +633,8 @@ fn backtrack_isolated_nodes( } state.node_map[p_node_idx] = None; context.trace.on_event(MatchEvent::Branch { - include_edge: None, - include_node: Some(t_node_idx), + target_edge: None, + target_node: Some(t_node_idx), depth: idx, }); } @@ -641,10 +655,19 @@ fn try_map_node( where FN: Fn(&OP, &O) -> bool, { + context.trace.on_event(MatchEvent::Decision { + pattern_edge: None, + pattern_node: Some(p_node_idx), + choice_features: "edge_incidence", + candidate_count: 1, + heuristic_tag: "edge_incidence", + depth, + }); if let Some(existing) = state.node_map[p_node_idx] { if existing.0 != t_node_idx { context.trace.on_event(MatchEvent::Prune { reason: "node_mapped_conflict", + detail: "edge_incidence_conflict", depth, }); return false; @@ -657,6 +680,7 @@ where if context.options.injective && state.used_target_nodes[t_node_idx] { context.trace.on_event(MatchEvent::Prune { reason: "node_used", + detail: "injective_node_reuse", depth, }); return false; @@ -667,6 +691,7 @@ where ) { context.trace.on_event(MatchEvent::Prune { reason: "label_mismatch", + detail: "node_label", depth, }); return false; @@ -674,6 +699,7 @@ where if !degree_feasible(context, state, p_node_idx, t_node_idx, add_in, add_out) { context.trace.on_event(MatchEvent::Prune { reason: "degree_infeasible", + detail: "degree_capacity", depth, }); return false; @@ -685,8 +711,8 @@ where } newly_mapped.push(p_node_idx); context.trace.on_event(MatchEvent::Branch { - include_edge: None, - include_node: Some(t_node_idx), + target_edge: None, + target_node: Some(t_node_idx), depth, }); context From 49c95c6d61c2644a2bea70ed551cb1cb667e7fd7 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 3 Feb 2026 13:11:22 +0100 Subject: [PATCH 14/16] format --- src/lax/hypergraph.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lax/hypergraph.rs b/src/lax/hypergraph.rs index 8d18ea2..3d709ba 100644 --- a/src/lax/hypergraph.rs +++ b/src/lax/hypergraph.rs @@ -336,7 +336,6 @@ impl Hypergraph { } } - impl Hypergraph { /// Construct a [`Hypergraph`] by identifying nodes in the quotient map. /// Mutably quotient this [`Hypergraph`], returning the coequalizer calculated from `self.quotient`. 
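
The PrintTrace observer above prints every event as it happens; when only aggregate numbers are wanted, a much smaller MatchTrace implementation is enough. The sketch below is a minimal counting tracer and is not part of the library: it assumes the MatchTrace trait and MatchEvent variants exactly as they appear in examples/matching_trace.rs and src/lax/matching.rs in this series (on_event as the only required method, and Prune carrying the reason, detail and depth fields added in this patch). The name CountingTrace and the choice to key counts by (reason, detail) are illustrative.

use open_hypergraphs::lax::matching::{MatchEvent, MatchTrace};
use std::cell::RefCell;
use std::collections::BTreeMap;

/// Tallies how often each (reason, detail) pair caused a prune, plus the
/// number of complete solutions reported by the search.
#[derive(Default)]
struct CountingTrace {
    prunes: RefCell<BTreeMap<(&'static str, &'static str), usize>>,
    solutions: RefCell<usize>,
}

impl MatchTrace for CountingTrace {
    fn on_event(&self, event: MatchEvent) {
        match event {
            // `reason` is the coarse class (e.g. "node_used"); `detail` is the
            // finer cause (e.g. "injective_node_reuse") introduced in this patch.
            MatchEvent::Prune { reason, detail, .. } => {
                *self.prunes.borrow_mut().entry((reason, detail)).or_insert(0) += 1;
            }
            MatchEvent::Solution => {
                *self.solutions.borrow_mut() += 1;
            }
            // EnterFrame, ExitFrame, Decision, Branch and PropagationSummary
            // are ignored by this tracer.
            _ => {}
        }
    }
}

Binding `let tracer = CountingTrace::default();` and passing `Some(&tracer)` exactly as the example's main() does with PrintTrace would then report, after the search, which prune reasons fire most often for a given pattern.
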
From 2a0fb3b9746215fc29d7ccc8961ecd5a71bef360 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 3 Feb 2026 13:16:56 +0100 Subject: [PATCH 15/16] more tests --- examples/matching_trace.rs | 6 ++--- tests/lax/matching.rs | 48 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/examples/matching_trace.rs b/examples/matching_trace.rs index 9ad64a8..8f994f8 100644 --- a/examples/matching_trace.rs +++ b/examples/matching_trace.rs @@ -68,7 +68,7 @@ impl MatchTrace for PrintTrace { self.indent.set(self.indent.get() + 1); self.frame_stack.borrow_mut().push(frame_id); } - MatchEvent::ExitFrame { depth, frame_id } => { + MatchEvent::ExitFrame { depth: _, frame_id } => { let indent = self.indent.get().saturating_sub(1); self.indent.set(indent); println!( @@ -99,7 +99,7 @@ impl MatchTrace for PrintTrace { choice_features, candidate_count, heuristic_tag, - depth, + depth: _, } => { let current = self.frame_stack.borrow().last().copied(); if let Some(frame_id) = current { @@ -123,7 +123,7 @@ impl MatchTrace for PrintTrace { MatchEvent::Branch { target_edge, target_node, - depth, + depth: _, } => { let current = self.frame_stack.borrow().last().copied(); if let Some(frame_id) = current { diff --git a/tests/lax/matching.rs b/tests/lax/matching.rs index 6004cf9..09e27d4 100644 --- a/tests/lax/matching.rs +++ b/tests/lax/matching.rs @@ -1,5 +1,39 @@ use open_hypergraphs::lax::{EdgeId, Hypergraph, NodeId}; +fn assert_is_morphism( + target: &Hypergraph, + pattern: &Hypergraph, + morphism: &open_hypergraphs::lax::matching::Morphism, + node_eq: impl Fn(&OP, &O) -> bool, + edge_eq: impl Fn(&AP, &A) -> bool, +) { + // Node labels must be preserved. + for (p_idx, p_label) in pattern.nodes.iter().enumerate() { + let t_idx = morphism.node_map()[p_idx].0; + assert!(node_eq(p_label, &target.nodes[t_idx])); + } + + // Edge labels and incidence (ordered sources/targets) must be preserved. 
+ for (p_edge_idx, p_label) in pattern.edges.iter().enumerate() { + let t_edge_idx = morphism.edge_map()[p_edge_idx].0; + assert!(edge_eq(p_label, &target.edges[t_edge_idx])); + + let p_adj = &pattern.adjacency[p_edge_idx]; + let t_adj = &target.adjacency[t_edge_idx]; + assert_eq!(p_adj.sources.len(), t_adj.sources.len()); + assert_eq!(p_adj.targets.len(), t_adj.targets.len()); + + for (p_node, t_node) in p_adj.sources.iter().zip(t_adj.sources.iter()) { + let mapped = morphism.node_map()[p_node.0]; + assert_eq!(mapped, *t_node); + } + for (p_node, t_node) in p_adj.targets.iter().zip(t_adj.targets.iter()) { + let mapped = morphism.node_map()[p_node.0]; + assert_eq!(mapped, *t_node); + } + } +} + #[test] fn test_subgraph_isomorphisms_single_edge() { let mut target = Hypergraph::empty(); @@ -26,6 +60,7 @@ fn test_subgraph_isomorphisms_single_edge() { assert_eq!(sources, vec![0, 2]); for m in matches { + assert_is_morphism(&target, &pattern, &m, |a, b| a == b, |a, b| a == b); if m.node_map()[0] == NodeId(0) { assert_eq!(m.edge_map()[0], EdgeId(0)); } else { @@ -72,6 +107,9 @@ fn test_subgraph_isomorphisms_isolated_nodes() { .collect::>(); sources.sort(); assert_eq!(sources, vec![0, 2]); + for m in matches { + assert_is_morphism(&target, &pattern, &m, |a, b| a == b, |a, b| a == b); + } } #[test] @@ -156,6 +194,7 @@ fn test_subgraph_isomorphisms_multi_incidence_sources() { let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); + assert_is_morphism(&target, &pattern, &matches[0], |a, b| a == b, |a, b| a == b); assert_eq!(matches[0].node_map()[0], n0); assert_eq!(matches[0].node_map()[1], n1); assert_eq!(matches[0].edge_map()[0], EdgeId(0)); @@ -200,6 +239,7 @@ fn test_subgraph_isomorphisms_node_in_sources_and_targets() { let matches = target.find_subgraph_isomorphisms(&pattern, None); assert_eq!(matches.len(), 1); + assert_is_morphism(&target, &pattern, &matches[0], |a, b| a == b, |a, b| a == b); assert_eq!(matches[0].node_map()[0], n0); assert_eq!(matches[0].edge_map()[0], EdgeId(0)); } @@ -225,6 +265,9 @@ fn test_subgraph_isomorphisms_identical_edges_injective() { .collect::>(); edge_ids.sort(); assert_eq!(edge_ids, vec![0, 1]); + for m in matches { + assert_is_morphism(&target, &pattern, &m, |a, b| a == b, |a, b| a == b); + } } #[test] @@ -249,6 +292,9 @@ fn test_subgraph_isomorphisms_two_identical_edges_bijective() { .collect::>(); edge_maps.sort(); assert_eq!(edge_maps, vec![(0, 1), (1, 0)]); + for m in matches { + assert_is_morphism(&target, &pattern, &m, |a, b| a == b, |a, b| a == b); + } } #[test] @@ -284,6 +330,7 @@ fn test_subgraph_homomorphisms_allow_node_merging() { assert_eq!(homo_matches.len(), 1); assert_eq!(homo_matches[0].node_map()[0], n0); assert_eq!(homo_matches[0].node_map()[1], n0); + assert_is_morphism(&target, &pattern, &homo_matches[0], |a, b| a == b, |a, b| a == b); } #[test] @@ -304,4 +351,5 @@ fn test_subgraph_homomorphisms_allow_edge_merging() { let homo_matches = target.find_subgraph_homomorphisms(&pattern, None); assert_eq!(homo_matches.len(), 1); + assert_is_morphism(&target, &pattern, &homo_matches[0], |a, b| a == b, |a, b| a == b); } From a509ab7f95ee9355fa4d9c0a1df0f02449e47ae1 Mon Sep 17 00:00:00 2001 From: mstn Date: Tue, 3 Feb 2026 13:20:02 +0100 Subject: [PATCH 16/16] format --- tests/lax/matching.rs | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/lax/matching.rs b/tests/lax/matching.rs index 09e27d4..41d5a25 100644 --- a/tests/lax/matching.rs +++ b/tests/lax/matching.rs @@ 
-330,7 +330,13 @@ fn test_subgraph_homomorphisms_allow_node_merging() { assert_eq!(homo_matches.len(), 1); assert_eq!(homo_matches[0].node_map()[0], n0); assert_eq!(homo_matches[0].node_map()[1], n0); - assert_is_morphism(&target, &pattern, &homo_matches[0], |a, b| a == b, |a, b| a == b); + assert_is_morphism( + &target, + &pattern, + &homo_matches[0], + |a, b| a == b, + |a, b| a == b, + ); } #[test] @@ -351,5 +357,11 @@ fn test_subgraph_homomorphisms_allow_edge_merging() { let homo_matches = target.find_subgraph_homomorphisms(&pattern, None); assert_eq!(homo_matches.len(), 1); - assert_is_morphism(&target, &pattern, &homo_matches[0], |a, b| a == b, |a, b| a == b); + assert_is_morphism( + &target, + &pattern, + &homo_matches[0], + |a, b| a == b, + |a, b| a == b, + ); }
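
The assert_is_morphism helper checks the two defining properties of a match: node and edge labels are preserved, and ordered incidence (sources and targets) is preserved. Outside the tests, a common next step is to take the image of a match in the target, for example to drive a rewrite. The sketch below is a hypothetical helper built only on the Morphism::node_map / Morphism::edge_map accessors used above; it assumes Morphism is the plain id-map struct the tests treat it as, and that NodeId and EdgeId are Copy index newtypes (the tests copy them out of the returned slices, which suggests this). The name match_image and the sort-and-dedup policy are illustrative, not library API.

use open_hypergraphs::lax::matching::Morphism;
use open_hypergraphs::lax::{EdgeId, NodeId};

/// Collect the target nodes and edges hit by a match, sorted and
/// deduplicated. For homomorphisms the maps need not be injective, so
/// several pattern items may land on the same target item.
fn match_image(m: &Morphism) -> (Vec<NodeId>, Vec<EdgeId>) {
    let mut nodes: Vec<NodeId> = m.node_map().to_vec();
    nodes.sort_by_key(|n| n.0);
    nodes.dedup();

    let mut edges: Vec<EdgeId> = m.edge_map().to_vec();
    edges.sort_by_key(|e| e.0);
    edges.dedup();

    (nodes, edges)
}

On the node-merging homomorphism test above, for instance, match_image would return a single target node and a single target edge even though the pattern has two nodes.
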