diff --git a/sensor/Cargo.toml b/sensor/Cargo.toml new file mode 100644 index 0000000..13b1803 --- /dev/null +++ b/sensor/Cargo.toml @@ -0,0 +1,24 @@ +[workspace] + +[package] +name = "iii-sensor" +version = "0.1.0" +edition = "2021" +publish = false + +[[bin]] +name = "iii-sensor" +path = "src/main.rs" + +[dependencies] +iii-sdk = { version = "0.10.0", features = ["otel"] } +tokio = { version = "1", features = ["rt-multi-thread", "macros", "sync", "signal", "process"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +serde_yaml = "0.9" +anyhow = "1" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] } +clap = { version = "4", features = ["derive"] } +chrono = { version = "0.4", features = ["serde"] } +walkdir = "2" diff --git a/sensor/README.md b/sensor/README.md new file mode 100644 index 0000000..85b5dbf --- /dev/null +++ b/sensor/README.md @@ -0,0 +1,68 @@ +# iii-sensor + +The code your AI writes today is the context it reads tomorrow. Every session silently degrades your architecture unless you measure it. iii-sensor scans your codebase, computes a quality score across 5 dimensions (complexity, coupling, cohesion, size, duplication), saves baselines before agent sessions, and flags when quality drops. It's the feedback sensor in the harness engineering loop. + +**Plug and play:** Build with `cargo build --release`, then run `./target/release/iii-sensor --url ws://your-engine:49134`. It registers 6 functions. Call `sensor::baseline` before a coding session, then `sensor::compare` after to see what changed. Wire `sensor::gate` into your CI to reject PRs that degrade quality below your threshold. 
+ +## Functions + +| Function ID | Description | +|---|---| +| `sensor::scan` | Walk a directory and compute per-file code quality metrics | +| `sensor::score` | Aggregate quality score (0-100) from scan results using weighted power mean | +| `sensor::baseline` | Save a named baseline snapshot for later comparison | +| `sensor::compare` | Compare a fresh scan against a saved baseline to detect degradation | +| `sensor::gate` | CI quality gate returning pass/fail on score thresholds | +| `sensor::history` | Retrieve historical scores and compute trend direction | + +## iii Primitives Used + +- **State** -- baselines, score history, latest scan results (keyed by path hash) +- **HTTP** -- all functions exposed as REST endpoints + +## Prerequisites + +- Rust 1.75+ +- Running iii engine on `ws://127.0.0.1:49134` + +## Build + +```bash +cargo build --release +``` + +## Usage + +```bash +./target/release/iii-sensor --url ws://127.0.0.1:49134 --config ./config.yaml +``` + +``` +Options: + --config Path to config.yaml [default: ./config.yaml] + --url WebSocket URL of the iii engine [default: ws://127.0.0.1:49134] + --manifest Output module manifest as JSON and exit + -h, --help Print help +``` + +## Configuration + +```yaml +scan_extensions: ["rs", "ts", "py", "js", "go"] # file extensions to include +max_file_size_kb: 512 # skip files larger than this +score_weights: + complexity: 0.25 + coupling: 0.25 + cohesion: 0.20 + size: 0.15 + duplication: 0.15 +thresholds: + degradation_pct: 10.0 # dimension drop % that flags degradation + min_score: 60.0 # minimum passing score for quality gate +``` + +## Tests + +```bash +cargo test +``` diff --git a/sensor/SPEC.md b/sensor/SPEC.md new file mode 100644 index 0000000..f11fef3 --- /dev/null +++ b/sensor/SPEC.md @@ -0,0 +1,299 @@ +# iii-sensor + +Code quality feedback sensor for the III engine. Analyzes source code structure, computes quality scores, saves baselines, and detects degradation after agent coding sessions. 
Inspired by Sentrux. + +## Functions + +### `sensor::scan` + +Walks a directory and computes per-file code quality metrics. + +**Input:** +```json +{ + "path": "/path/to/scan", + "extensions": ["rs", "ts"] +} +``` + +- `path` (required) — directory to scan +- `extensions` (optional) — file extensions to include; defaults to config value + +**Output:** +```json +{ + "files": [ + { + "path": "/path/to/file.rs", + "language": "rust", + "line_count": 150, + "code_lines": 120, + "complexity": 14, + "max_depth": 4, + "function_count": 8, + "avg_function_length": 15.0, + "import_count": 5 + } + ], + "summary": { + "total_files": 42, + "total_lines": 3200, + "avg_complexity": 8.5, + "languages": { + "rust": { "files": 30, "lines": 2400 }, + "typescript": { "files": 12, "lines": 800 } + } + } +} +``` + +**Side effects:** Stores result in `sensor:latest:{path_hash}` state scope. + +--- + +### `sensor::score` + +Computes an aggregate quality score (0-100) from scan results using a weighted geometric mean across five dimensions. + +**Input (option A — scan inline):** +```json +{ "path": "/path/to/scan" } +``` + +**Input (option B — pre-computed):** +```json +{ "scan_result": { "files": [...], "summary": {...} } } +``` + +**Output:** +```json +{ + "score": 78.3, + "dimensions": { + "complexity": 85.0, + "coupling": 72.0, + "cohesion": 68.5, + "size": 90.0, + "duplication": 85.0 + }, + "grade": "C", + "file_count": 42, + "timestamp": "2026-04-06T12:00:00Z" +} +``` + +**Side effects:** Appends score to `sensor:history:{path_hash}` state scope. + +--- + +### `sensor::baseline` + +Runs scan + score and saves the result as a named baseline snapshot for later comparison. + +**Input:** +```json +{ + "path": "/path/to/scan", + "label": "pre-session" +} +``` + +- `label` (optional) — defaults to `"default"` + +**Output:** +```json +{ + "baseline_id": "a1b2c3d4:pre-session", + "score": 78.3, + "dimensions": { ... 
}, + "timestamp": "2026-04-06T12:00:00Z", + "label": "pre-session", + "file_count": 42, + "grade": "C" +} +``` + +**Side effects:** Stores baseline in `sensor:baselines:{path_hash}` state scope under the label key. + +--- + +### `sensor::compare` + +Runs a fresh scan + score and compares it against a saved baseline to detect degradation. + +**Input:** +```json +{ + "path": "/path/to/scan", + "baseline_id": "a1b2c3d4:pre-session", + "label": "pre-session" +} +``` + +- `baseline_id` or `label` — identifies which baseline to compare against; defaults to `"default"` + +**Output:** +```json +{ + "degraded": true, + "overall_delta": -5.2, + "dimension_deltas": { + "complexity": -8.0, + "coupling": +2.0, + "cohesion": -3.5, + "size": -1.0, + "duplication": 0.0 + }, + "baseline_score": 78.3, + "current_score": 73.1, + "degraded_dimensions": ["complexity", "cohesion"], + "timestamp": "2026-04-06T13:00:00Z" +} +``` + +A dimension is flagged as degraded when it drops more than `thresholds.degradation_pct` percent from the baseline value. + +--- + +### `sensor::gate` + +CI quality gate that returns pass/fail based on absolute score thresholds and degradation limits. + +**Input:** +```json +{ + "path": "/path/to/scan", + "min_score": 60, + "max_degradation_pct": 10 +} +``` + +- `min_score` (optional) — defaults to config `thresholds.min_score` (60) +- `max_degradation_pct` (optional) — defaults to config `thresholds.degradation_pct` (10) + +**Output:** +```json +{ + "passed": false, + "score": 58.2, + "grade": "F", + "reason": "score 58.2 below minimum 60.0", + "details": { + "dimensions": { ... }, + "file_count": 42, + "min_score": 60, + "max_degradation_pct": 10 + } +} +``` + +Gate checks both absolute score and degradation against the `default` baseline (if one exists). + +--- + +### `sensor::history` + +Retrieves historical quality scores and computes trend direction. 
+ +**Input:** +```json +{ + "path": "/path/to/scan", + "limit": 20 +} +``` + +**Output:** +```json +{ + "scores": [ + { "score": 78.3, "dimensions": {...}, "timestamp": "...", "grade": "C" }, + { "score": 75.1, "dimensions": {...}, "timestamp": "...", "grade": "C" } + ], + "total_entries": 15, + "trend": "degrading" +} +``` + +Trend is computed from the oldest to newest score in the window: +- `improving` — score increased by more than 2 points +- `degrading` — score decreased by more than 2 points +- `stable` — within 2 points + +--- + +## Scoring Methodology + +Each file contributes to five dimensions, each scored 0-100: + +| Dimension | What it measures | Penalty | +|-----------|-----------------|---------| +| **Complexity** | Branching keywords per 100 code lines | >5/100 lines, -5 points per extra | +| **Coupling** | Average import count per file | >5 imports, -3 points per extra | +| **Cohesion** | Function count proportional to file size | Deviation from ideal ratio (1 fn per 20 lines) | +| **Size** | Average code lines per file | >100 lines, -0.2 points per extra | +| **Duplication** | Similar line patterns | Placeholder (fixed at 85 for v1) | + +The final score uses a **weighted power mean** (approximation of Nash Social Welfare / geometric mean) so that gaming one dimension at the expense of others is penalized: + +``` +score = complexity^0.25 * coupling^0.25 * cohesion^0.20 * size^0.15 * duplication^0.15 +``` + +Grades: A (90+), B (80-89), C (70-79), D (60-69), F (<60) + +--- + +## State Scopes + +| Scope | Key | Value | +|-------|-----|-------| +| `sensor:baselines:{path_hash}` | label string | Baseline snapshot JSON | +| `sensor:history:{path_hash}` | `"scores"` | Array of score results | +| `sensor:latest:{path_hash}` | `"scan"` | Most recent scan result | + +`path_hash` is a deterministic hex hash of the scanned directory path using `DefaultHasher`. 
+ +--- + +## Triggers + +Each function is registered as an iii-engine function callable via `iii.trigger()`: + +- `sensor::scan` — HTTP trigger +- `sensor::score` — HTTP trigger +- `sensor::baseline` — HTTP trigger +- `sensor::compare` — HTTP trigger +- `sensor::gate` — HTTP trigger +- `sensor::history` — HTTP trigger + +Queue trigger on `sensor.scan.requested` topic is planned for async scan jobs (v2). + +--- + +## Configuration + +`config.yaml`: + +```yaml +scan_extensions: ["rs", "ts", "py", "js", "go"] +max_file_size_kb: 512 +score_weights: + complexity: 0.25 + coupling: 0.25 + cohesion: 0.20 + size: 0.15 + duplication: 0.15 +thresholds: + degradation_pct: 10.0 + min_score: 60.0 +``` + +--- + +## CLI + +``` +iii-sensor --config ./config.yaml --url ws://127.0.0.1:49134 +iii-sensor --manifest # output module manifest JSON and exit +``` diff --git a/sensor/build.rs b/sensor/build.rs new file mode 100644 index 0000000..81caa36 --- /dev/null +++ b/sensor/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!( + "cargo:rustc-env=TARGET={}", + std::env::var("TARGET").unwrap() + ); +} diff --git a/sensor/config.yaml b/sensor/config.yaml new file mode 100644 index 0000000..ef4e560 --- /dev/null +++ b/sensor/config.yaml @@ -0,0 +1,11 @@ +scan_extensions: ["rs", "ts", "py", "js", "go"] +max_file_size_kb: 512 +score_weights: + complexity: 0.25 + coupling: 0.25 + cohesion: 0.20 + size: 0.15 + duplication: 0.15 +thresholds: + degradation_pct: 10.0 + min_score: 60.0 diff --git a/sensor/src/analysis.rs b/sensor/src/analysis.rs new file mode 100644 index 0000000..64db367 --- /dev/null +++ b/sensor/src/analysis.rs @@ -0,0 +1,491 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::Path; +use walkdir::WalkDir; + +use crate::config::SensorConfig; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FileMetrics { + pub path: String, + pub language: String, + pub line_count: usize, + pub code_lines: usize, + pub complexity: usize, + pub 
max_depth: usize, + pub function_count: usize, + pub avg_function_length: f64, + pub import_count: usize, + pub duplicate_lines: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LanguageSummary { + pub files: usize, + pub lines: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScanSummary { + pub total_files: usize, + pub total_lines: usize, + pub avg_complexity: f64, + pub languages: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScanResult { + pub files: Vec, + pub summary: ScanSummary, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DimensionScores { + pub complexity: f64, + pub coupling: f64, + pub cohesion: f64, + pub size: f64, + pub duplication: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScoreResult { + pub score: f64, + pub dimensions: DimensionScores, + pub grade: String, + pub file_count: usize, + pub timestamp: String, +} + +pub fn detect_language(ext: &str) -> &str { + match ext { + "rs" => "rust", + "ts" | "tsx" => "typescript", + "js" | "jsx" => "javascript", + "py" => "python", + "go" => "go", + _ => "unknown", + } +} + +pub fn count_complexity(content: &str, ext: &str) -> usize { + let keywords: &[&str] = match ext { + "rs" => &[ + "if ", "else ", "match ", "for ", "while ", "loop ", "?", ".unwrap(", + ], + "ts" | "tsx" | "js" | "jsx" => &[ + "if ", "else ", "switch ", "for ", "while ", "try ", "catch ", "? 
", + ], + "py" => &[ + "if ", "elif ", "else:", "for ", "while ", "try:", "except ", "with ", + ], + "go" => &["if ", "else ", "switch ", "for ", "select ", "case "], + _ => &["if ", "else ", "for ", "while "], + }; + content + .lines() + .map(|line| { + let trimmed = line.trim(); + keywords.iter().filter(|kw| trimmed.contains(**kw)).count() + }) + .sum() +} + +pub fn count_max_depth(content: &str) -> usize { + let mut max_depth: usize = 0; + let mut current_depth: usize = 0; + for line in content.lines() { + for ch in line.chars() { + if ch == '{' { + current_depth += 1; + if current_depth > max_depth { + max_depth = current_depth; + } + } else if ch == '}' { + current_depth = current_depth.saturating_sub(1); + } + } + } + max_depth +} + +pub fn count_functions(content: &str, ext: &str) -> usize { + let patterns: &[&str] = match ext { + "rs" => &["fn "], + "ts" | "tsx" | "js" | "jsx" => &["function ", "=> {", "=> ("], + "py" => &["def "], + "go" => &["func "], + _ => &["fn ", "def ", "function ", "func "], + }; + content + .lines() + .filter(|line| { + let trimmed = line.trim(); + patterns.iter().any(|p| trimmed.contains(p)) + }) + .count() +} + +pub fn count_imports(content: &str, ext: &str) -> usize { + content + .lines() + .filter(|line| { + let trimmed = line.trim(); + match ext { + "rs" => trimmed.starts_with("use "), + "ts" | "tsx" | "js" | "jsx" => { + trimmed.starts_with("import ") || trimmed.starts_with("require(") + } + "py" => trimmed.starts_with("import ") || trimmed.starts_with("from "), + "go" => trimmed.starts_with("import ") || trimmed.starts_with("\""), + _ => false, + } + }) + .count() +} + +pub fn count_code_lines(content: &str) -> usize { + content + .lines() + .filter(|line| { + let trimmed = line.trim(); + !trimmed.is_empty() + && !trimmed.starts_with("//") + && !trimmed.starts_with('#') + && !trimmed.starts_with("/*") + && !trimmed.starts_with('*') + && !trimmed.starts_with("*/") + }) + .count() +} + +pub fn count_duplicate_lines(content: 
&str) -> usize { + let mut seen: HashMap<&str, usize> = HashMap::new(); + for line in content.lines() { + let trimmed = line.trim(); + if trimmed.len() >= 10 { + *seen.entry(trimmed).or_insert(0) += 1; + } + } + seen.values().filter(|&&count| count > 1).map(|c| c - 1).sum() +} + +pub fn analyze_file(path: &Path, ext: &str) -> Option { + let content = std::fs::read_to_string(path).ok()?; + let line_count = content.lines().count(); + let code_lines = count_code_lines(&content); + let complexity = count_complexity(&content, ext); + let max_depth = count_max_depth(&content); + let function_count = count_functions(&content, ext); + let avg_function_length = if function_count > 0 { + code_lines as f64 / function_count as f64 + } else { + code_lines as f64 + }; + let import_count = count_imports(&content, ext); + let duplicate_lines = count_duplicate_lines(&content); + + Some(FileMetrics { + path: path.to_string_lossy().to_string(), + language: detect_language(ext).to_string(), + line_count, + code_lines, + complexity, + max_depth, + function_count, + avg_function_length, + import_count, + duplicate_lines, + }) +} + +pub fn scan_directory(dir: &str, extensions: &[String], max_file_size_kb: u64) -> ScanResult { + let max_bytes = max_file_size_kb * 1024; + let mut files = Vec::new(); + let mut languages: HashMap = HashMap::new(); + + for entry in WalkDir::new(dir) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.file_type().is_file()) + { + let path = entry.path(); + let ext = match path.extension().and_then(|e| e.to_str()) { + Some(e) => e.to_string(), + None => continue, + }; + + if !extensions.iter().any(|allowed| allowed == &ext) { + continue; + } + + if let Ok(metadata) = std::fs::metadata(path) { + if metadata.len() > max_bytes { + continue; + } + } + + if let Some(metrics) = analyze_file(path, &ext) { + let lang_entry = languages + .entry(metrics.language.clone()) + .or_insert(LanguageSummary { files: 0, lines: 0 }); + lang_entry.files += 1; + 
lang_entry.lines += metrics.line_count;
+            files.push(metrics);
+        }
+    }
+
+    let total_files = files.len();
+    let total_lines: usize = files.iter().map(|f| f.line_count).sum();
+    let avg_complexity = if total_files > 0 {
+        files.iter().map(|f| f.complexity as f64).sum::<f64>() / total_files as f64
+    } else {
+        0.0
+    };
+
+    ScanResult {
+        files,
+        summary: ScanSummary {
+            total_files,
+            total_lines,
+            avg_complexity,
+            languages,
+        },
+    }
+}
+
+/// Unweighted geometric mean of `scores`.
+///
+/// Returns 0.0 for an empty slice or when any entry is non-positive
+/// (the logarithm would be undefined there).
+pub fn geometric_mean(scores: &[f64]) -> f64 {
+    if scores.is_empty() || scores.iter().any(|s| *s <= 0.0) {
+        return 0.0;
+    }
+    let mean_ln: f64 = scores.iter().map(|s| s.ln()).sum::<f64>() / scores.len() as f64;
+    mean_ln.exp()
+}
+
+/// Aggregate the five dimension scores (each 0-100) into one quality score
+/// using the weighted geometric mean documented in SPEC.md:
+/// `score = Π dim_i ^ w_i` with the weights normalized to sum to 1.
+pub fn compute_score(scan: &ScanResult, config: &SensorConfig) -> ScoreResult {
+    let files = &scan.files;
+    // max(1) keeps every division below well-defined for an empty scan.
+    let total = files.len().max(1) as f64;
+
+    // Complexity: branching keywords per 100 code lines; >5 costs 5 pts each.
+    let avg_complexity_per_100 = files
+        .iter()
+        .map(|f| {
+            if f.code_lines > 0 {
+                (f.complexity as f64 / f.code_lines as f64) * 100.0
+            } else {
+                0.0
+            }
+        })
+        .sum::<f64>()
+        / total;
+    let complexity_score =
+        (100.0 - (avg_complexity_per_100 - 5.0).max(0.0) * 5.0).clamp(0.0, 100.0);
+
+    // Coupling: average imports per file; >5 costs 3 pts each.
+    let avg_imports = files.iter().map(|f| f.import_count as f64).sum::<f64>() / total;
+    let coupling_score = (100.0 - (avg_imports - 5.0).max(0.0) * 3.0).clamp(0.0, 100.0);
+
+    // Cohesion: deviation from the ideal of one function per ~20 code lines.
+    let cohesion_raw: f64 = files
+        .iter()
+        .map(|f| {
+            if f.function_count > 0 {
+                let ideal = (f.code_lines as f64 / 20.0).max(1.0);
+                let ratio = f.function_count as f64 / ideal;
+                (1.0 - (ratio - 1.0).abs()).max(0.0) * 100.0
+            } else {
+                50.0
+            }
+        })
+        .sum::<f64>()
+        / total;
+    let cohesion_score = cohesion_raw.clamp(0.0, 100.0);
+
+    // Size: average code lines per file; >100 costs 0.2 pts per extra line.
+    let avg_lines = files.iter().map(|f| f.code_lines as f64).sum::<f64>() / total;
+    let size_score = (100.0 - (avg_lines - 100.0).max(0.0) * 0.2).clamp(0.0, 100.0);
+
+    // Duplication: ratio of duplicated lines to code lines.
+    let avg_dup_ratio = files
+        .iter()
+        .map(|f| {
+            if f.code_lines > 0 {
+                f.duplicate_lines as f64 / f.code_lines as f64
+            } else {
+                0.0
+            }
+        })
+        .sum::<f64>()
+        / total;
+    let duplication_score = (100.0 - avg_dup_ratio * 200.0).clamp(0.0, 100.0);
+
+    let dimension_scores = [
+        complexity_score,
+        coupling_score,
+        cohesion_score,
+        size_score,
+        duplication_score,
+    ];
+    let weights = [
+        config.score_weights.complexity,
+        config.score_weights.coupling,
+        config.score_weights.cohesion,
+        config.score_weights.size,
+        config.score_weights.duplication,
+    ];
+
+    // BUG FIX: the previous implementation multiplied each dimension score by
+    // its weight and then took an UNWEIGHTED geometric mean of the products.
+    // That scales every result by the constant (Π w_i)^(1/5) ≈ 0.195 for the
+    // default weights, so even a perfect codebase scored ~19.5 and graded "F".
+    // The documented formula is the weighted geometric mean `Π s_i^{w_i}`,
+    // computed here in log space. Each score is floored at 1.0 so a single
+    // zero dimension cannot produce ln(0) = -inf.
+    let weight_sum: f64 = weights.iter().sum();
+    let score = if weight_sum > 0.0 {
+        let weighted_ln: f64 = dimension_scores
+            .iter()
+            .zip(weights.iter())
+            .map(|(s, w)| w * s.max(1.0).ln())
+            .sum();
+        (weighted_ln / weight_sum).exp().clamp(0.0, 100.0)
+    } else {
+        0.0
+    };
+
+    let grade = match score as u32 {
+        90..=100 => "A",
+        80..=89 => "B",
+        70..=79 => "C",
+        60..=69 => "D",
+        _ => "F",
+    }
+    .to_string();
+
+    let timestamp = chrono::Utc::now().to_rfc3339();
+
+    ScoreResult {
+        score,
+        dimensions: DimensionScores {
+            complexity: complexity_score,
+            coupling: coupling_score,
+            cohesion: cohesion_score,
+            size: size_score,
+            duplication: duplication_score,
+        },
+        grade,
+        file_count: files.len(),
+        timestamp,
+    }
+}
+
+/// Deterministic hex hash of a directory path, used as the state-scope key
+/// (`DefaultHasher` is stable within a single build of the binary).
+pub fn hash_path(path: &str) -> String {
+    use std::collections::hash_map::DefaultHasher;
+    use std::hash::{Hash, Hasher};
+    let mut hasher = DefaultHasher::new();
+    path.hash(&mut hasher);
+    format!("{:x}", hasher.finish())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_count_complexity_rust() {
+        let code = r#"
+fn main() {
+    if x > 0 {
+        for i in 0..10 {
+            while running {
+                match cmd {
+                    _ => {}
+                }
+            }
+        }
+    }
+}
+"#;
+        let c = count_complexity(code, "rs");
+        assert!(c >= 4);
+    }
+
+    #[test]
+    fn test_count_complexity_python() {
+        let code = "if x:\n for i in range(10):\n while True:\n pass\n";
+        let c = count_complexity(code, "py");
+        assert!(c >= 3);
+    }
+
+    #[test]
+    fn test_count_max_depth() {
+        let code = "fn main() {\n if true {\n {\n }\n }\n}\n";
+        assert_eq!(count_max_depth(code), 3);
+    }
+
+    #[test]
+    fn test_count_functions_rust() {
+        let code = "fn main() {}\nfn helper() {}\npub fn public_fn() {}\n";
+
assert_eq!(count_functions(code, "rs"), 3); + } + + #[test] + fn test_count_functions_python() { + let code = "def main():\n pass\ndef helper():\n pass\n"; + assert_eq!(count_functions(code, "py"), 2); + } + + #[test] + fn test_count_imports_rust() { + let code = "use std::io;\nuse serde::Serialize;\nfn main() {}\n"; + assert_eq!(count_imports(code, "rs"), 2); + } + + #[test] + fn test_count_code_lines() { + let code = "fn main() {\n // comment\n\n let x = 1;\n}\n"; + assert_eq!(count_code_lines(code), 3); + } + + #[test] + fn test_geometric_mean() { + let scores = vec![100.0, 100.0, 100.0]; + assert!((geometric_mean(&scores) - 100.0).abs() < 0.01); + + let empty: Vec = vec![]; + assert_eq!(geometric_mean(&empty), 0.0); + + let with_zero = vec![0.0, 50.0]; + assert_eq!(geometric_mean(&with_zero), 0.0); + } + + #[test] + fn test_hash_path_deterministic() { + let h1 = hash_path("/tmp/test"); + let h2 = hash_path("/tmp/test"); + assert_eq!(h1, h2); + + let h3 = hash_path("/tmp/other"); + assert_ne!(h1, h3); + } + + #[test] + fn test_detect_language() { + assert_eq!(detect_language("rs"), "rust"); + assert_eq!(detect_language("ts"), "typescript"); + assert_eq!(detect_language("py"), "python"); + assert_eq!(detect_language("go"), "go"); + assert_eq!(detect_language("xyz"), "unknown"); + } + + #[test] + fn test_count_duplicate_lines() { + let code = "let x = something_long_enough;\nlet x = something_long_enough;\nshort\n"; + assert_eq!(count_duplicate_lines(code), 1); + } + + #[test] + fn test_compute_score_empty() { + let scan = ScanResult { + files: vec![], + summary: ScanSummary { + total_files: 0, + total_lines: 0, + avg_complexity: 0.0, + languages: HashMap::new(), + }, + }; + let config = SensorConfig::default(); + let result = compute_score(&scan, &config); + assert!(result.score >= 0.0); + assert!(!result.grade.is_empty()); + } +} diff --git a/sensor/src/config.rs b/sensor/src/config.rs new file mode 100644 index 0000000..f683b85 --- /dev/null +++ 
b/sensor/src/config.rs @@ -0,0 +1,157 @@ +use anyhow::Result; +use serde::Deserialize; + +#[derive(Deserialize, Debug, Clone)] +pub struct SensorConfig { + #[serde(default = "default_extensions")] + pub scan_extensions: Vec, + #[serde(default = "default_max_file_size_kb")] + pub max_file_size_kb: u64, + #[serde(default)] + pub score_weights: ScoreWeights, + #[serde(default)] + pub thresholds: Thresholds, +} + +fn default_extensions() -> Vec { + vec![ + "rs".into(), + "ts".into(), + "py".into(), + "js".into(), + "go".into(), + ] +} + +fn default_max_file_size_kb() -> u64 { + 512 +} + +#[derive(Deserialize, Debug, Clone)] +pub struct ScoreWeights { + #[serde(default = "default_complexity_weight")] + pub complexity: f64, + #[serde(default = "default_coupling_weight")] + pub coupling: f64, + #[serde(default = "default_cohesion_weight")] + pub cohesion: f64, + #[serde(default = "default_size_weight")] + pub size: f64, + #[serde(default = "default_duplication_weight")] + pub duplication: f64, +} + +fn default_complexity_weight() -> f64 { + 0.25 +} +fn default_coupling_weight() -> f64 { + 0.25 +} +fn default_cohesion_weight() -> f64 { + 0.20 +} +fn default_size_weight() -> f64 { + 0.15 +} +fn default_duplication_weight() -> f64 { + 0.15 +} + +impl Default for ScoreWeights { + fn default() -> Self { + ScoreWeights { + complexity: default_complexity_weight(), + coupling: default_coupling_weight(), + cohesion: default_cohesion_weight(), + size: default_size_weight(), + duplication: default_duplication_weight(), + } + } +} + +#[derive(Deserialize, Debug, Clone)] +pub struct Thresholds { + #[serde(default = "default_degradation_pct")] + pub degradation_pct: f64, + #[serde(default = "default_min_score")] + pub min_score: f64, +} + +fn default_degradation_pct() -> f64 { + 10.0 +} + +fn default_min_score() -> f64 { + 60.0 +} + +impl Default for Thresholds { + fn default() -> Self { + Thresholds { + degradation_pct: default_degradation_pct(), + min_score: default_min_score(), + } 
+ } +} + +impl Default for SensorConfig { + fn default() -> Self { + SensorConfig { + scan_extensions: default_extensions(), + max_file_size_kb: default_max_file_size_kb(), + score_weights: ScoreWeights::default(), + thresholds: Thresholds::default(), + } + } +} + +pub fn load_config(path: &str) -> Result { + let contents = std::fs::read_to_string(path)?; + let config: SensorConfig = serde_yaml::from_str(&contents)?; + Ok(config) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_config_defaults() { + let config: SensorConfig = serde_yaml::from_str("{}").unwrap(); + assert_eq!(config.scan_extensions.len(), 5); + assert_eq!(config.max_file_size_kb, 512); + assert!((config.score_weights.complexity - 0.25).abs() < f64::EPSILON); + assert!((config.thresholds.degradation_pct - 10.0).abs() < f64::EPSILON); + assert!((config.thresholds.min_score - 60.0).abs() < f64::EPSILON); + } + + #[test] + fn test_config_custom() { + let yaml = r#" +scan_extensions: ["rs", "go"] +max_file_size_kb: 256 +score_weights: + complexity: 0.30 + coupling: 0.20 + cohesion: 0.20 + size: 0.15 + duplication: 0.15 +thresholds: + degradation_pct: 5.0 + min_score: 70.0 +"#; + let config: SensorConfig = serde_yaml::from_str(yaml).unwrap(); + assert_eq!(config.scan_extensions.len(), 2); + assert_eq!(config.max_file_size_kb, 256); + assert!((config.score_weights.complexity - 0.30).abs() < f64::EPSILON); + assert!((config.thresholds.degradation_pct - 5.0).abs() < f64::EPSILON); + assert!((config.thresholds.min_score - 70.0).abs() < f64::EPSILON); + } + + #[test] + fn test_sensor_config_default() { + let config = SensorConfig::default(); + assert_eq!(config.scan_extensions.len(), 5); + assert_eq!(config.max_file_size_kb, 512); + } +} diff --git a/sensor/src/functions/baseline.rs b/sensor/src/functions/baseline.rs new file mode 100644 index 0000000..504b940 --- /dev/null +++ b/sensor/src/functions/baseline.rs @@ -0,0 +1,54 @@ +use std::sync::Arc; + +use iii_sdk::{IIIError, III}; +use 
serde_json::Value; + +use crate::analysis::{compute_score, hash_path, scan_directory}; +use crate::config::SensorConfig; +use crate::state; + +pub async fn handle(iii: Arc, config: Arc, payload: Value) -> Result { + let path = payload + .get("path") + .and_then(|v| v.as_str()) + .ok_or_else(|| IIIError::Handler("missing required field: path".to_string()))? + .to_string(); + + let label = payload + .get("label") + .and_then(|v| v.as_str()) + .unwrap_or("default") + .to_string(); + + let extensions = config.scan_extensions.clone(); + let max_kb = config.max_file_size_kb; + let scan_result = tokio::task::spawn_blocking({ + let path = path.clone(); + move || scan_directory(&path, &extensions, max_kb) + }) + .await + .map_err(|e| IIIError::Handler(format!("scan task failed: {e}")))?; + + let score_result = compute_score(&scan_result, &config); + + let path_hash = hash_path(&path); + let baseline_id = format!("{path_hash}:{label}"); + let timestamp = chrono::Utc::now().to_rfc3339(); + + let baseline = serde_json::json!({ + "baseline_id": baseline_id, + "score": score_result.score, + "dimensions": score_result.dimensions, + "timestamp": timestamp, + "label": label, + "file_count": score_result.file_count, + "grade": score_result.grade, + }); + + let scope = format!("sensor:baselines:{path_hash}"); + state::state_set(&iii, &scope, &label, baseline.clone()) + .await + .map_err(|e| IIIError::Handler(format!("failed to save baseline: {e}")))?; + + Ok(baseline) +} diff --git a/sensor/src/functions/compare.rs b/sensor/src/functions/compare.rs new file mode 100644 index 0000000..68666db --- /dev/null +++ b/sensor/src/functions/compare.rs @@ -0,0 +1,126 @@ +use std::sync::Arc; + +use iii_sdk::{IIIError, III}; +use serde_json::Value; + +use crate::analysis::{compute_score, hash_path, scan_directory, DimensionScores}; +use crate::config::SensorConfig; +use crate::state; + +pub async fn handle(iii: Arc, config: Arc, payload: Value) -> Result { + let path = payload + .get("path") + 
.and_then(|v| v.as_str()) + .ok_or_else(|| IIIError::Handler("missing required field: path".to_string()))? + .to_string(); + + let label = payload + .get("label") + .and_then(|v| v.as_str()) + .unwrap_or("default") + .to_string(); + + let path_hash = hash_path(&path); + + let baseline_key = if let Some(bid) = payload.get("baseline_id").and_then(|v| v.as_str()) { + let parts: Vec<&str> = bid.splitn(2, ':').collect(); + if parts.len() == 2 { + parts[1].to_string() + } else { + label.clone() + } + } else { + label.clone() + }; + + let scope = format!("sensor:baselines:{path_hash}"); + let baseline_val = state::state_get(&iii, &scope, &baseline_key) + .await + .map_err(|e| IIIError::Handler(format!("failed to load baseline: {e}")))?; + + if baseline_val.is_null() { + return Err(IIIError::Handler(format!( + "no baseline found for label '{baseline_key}' at path '{path}'" + ))); + } + + let baseline_score = baseline_val + .get("score") + .and_then(|v| v.as_f64()) + .unwrap_or(0.0); + + let baseline_dims: DimensionScores = serde_json::from_value( + baseline_val + .get("dimensions") + .cloned() + .unwrap_or(serde_json::json!({})), + ) + .unwrap_or(DimensionScores { + complexity: 0.0, + coupling: 0.0, + cohesion: 0.0, + size: 0.0, + duplication: 0.0, + }); + + let extensions = config.scan_extensions.clone(); + let max_kb = config.max_file_size_kb; + let scan_result = tokio::task::spawn_blocking({ + let path = path.clone(); + move || scan_directory(&path, &extensions, max_kb) + }) + .await + .map_err(|e| IIIError::Handler(format!("scan task failed: {e}")))?; + + let current = compute_score(&scan_result, &config); + + let overall_delta = current.score - baseline_score; + + let dimension_deltas = serde_json::json!({ + "complexity": current.dimensions.complexity - baseline_dims.complexity, + "coupling": current.dimensions.coupling - baseline_dims.coupling, + "cohesion": current.dimensions.cohesion - baseline_dims.cohesion, + "size": current.dimensions.size - baseline_dims.size, 
+ "duplication": current.dimensions.duplication - baseline_dims.duplication, + }); + + let threshold = config.thresholds.degradation_pct; + let mut degraded_dimensions = Vec::new(); + + let check = |_name: &str, current_val: f64, baseline_val: f64| -> bool { + if baseline_val > 0.0 { + let pct_drop = ((baseline_val - current_val) / baseline_val) * 100.0; + pct_drop > threshold + } else { + false + } + }; + + if check("complexity", current.dimensions.complexity, baseline_dims.complexity) { + degraded_dimensions.push("complexity"); + } + if check("coupling", current.dimensions.coupling, baseline_dims.coupling) { + degraded_dimensions.push("coupling"); + } + if check("cohesion", current.dimensions.cohesion, baseline_dims.cohesion) { + degraded_dimensions.push("cohesion"); + } + if check("size", current.dimensions.size, baseline_dims.size) { + degraded_dimensions.push("size"); + } + if check("duplication", current.dimensions.duplication, baseline_dims.duplication) { + degraded_dimensions.push("duplication"); + } + + let degraded = !degraded_dimensions.is_empty(); + + Ok(serde_json::json!({ + "degraded": degraded, + "overall_delta": overall_delta, + "dimension_deltas": dimension_deltas, + "baseline_score": baseline_score, + "current_score": current.score, + "degraded_dimensions": degraded_dimensions, + "timestamp": current.timestamp, + })) +} diff --git a/sensor/src/functions/gate.rs b/sensor/src/functions/gate.rs new file mode 100644 index 0000000..707a1f7 --- /dev/null +++ b/sensor/src/functions/gate.rs @@ -0,0 +1,87 @@ +use std::sync::Arc; + +use iii_sdk::{IIIError, III}; +use serde_json::Value; + +use crate::analysis::{compute_score, hash_path, scan_directory}; +use crate::config::SensorConfig; +use crate::state; + +pub async fn handle(iii: Arc, config: Arc, payload: Value) -> Result { + let path = payload + .get("path") + .and_then(|v| v.as_str()) + .ok_or_else(|| IIIError::Handler("missing required field: path".to_string()))? 
+ .to_string(); + + let min_score = payload + .get("min_score") + .and_then(|v| v.as_f64()) + .unwrap_or(config.thresholds.min_score); + + let max_degradation_pct = payload + .get("max_degradation_pct") + .and_then(|v| v.as_f64()) + .unwrap_or(config.thresholds.degradation_pct); + + let extensions = config.scan_extensions.clone(); + let max_kb = config.max_file_size_kb; + let scan_result = tokio::task::spawn_blocking({ + let path = path.clone(); + move || scan_directory(&path, &extensions, max_kb) + }) + .await + .map_err(|e| IIIError::Handler(format!("scan task failed: {e}")))?; + + let current = compute_score(&scan_result, &config); + + let mut passed = true; + let mut reasons: Vec = Vec::new(); + + if current.score < min_score { + passed = false; + reasons.push(format!( + "score {:.1} below minimum {:.1}", + current.score, min_score + )); + } + + let path_hash = hash_path(&path); + let scope = format!("sensor:baselines:{path_hash}"); + if let Ok(baseline_val) = state::state_get(&iii, &scope, "default").await { + if !baseline_val.is_null() { + if let Some(baseline_score) = baseline_val.get("score").and_then(|v| v.as_f64()) { + if baseline_score > 0.0 { + let degradation = + ((baseline_score - current.score) / baseline_score) * 100.0; + if degradation > max_degradation_pct { + passed = false; + reasons.push(format!( + "degradation {:.1}% exceeds maximum {:.1}%", + degradation, max_degradation_pct + )); + } + } + } + } + } + + let reason = if reasons.is_empty() { + None + } else { + Some(reasons.join("; ")) + }; + + Ok(serde_json::json!({ + "passed": passed, + "score": current.score, + "grade": current.grade, + "reason": reason, + "details": { + "dimensions": current.dimensions, + "file_count": current.file_count, + "min_score": min_score, + "max_degradation_pct": max_degradation_pct, + } + })) +} diff --git a/sensor/src/functions/history.rs b/sensor/src/functions/history.rs new file mode 100644 index 0000000..9f0eb5b --- /dev/null +++ 
b/sensor/src/functions/history.rs
use std::sync::Arc;

use iii_sdk::{IIIError, III};
use serde_json::Value;

use crate::analysis::hash_path;
use crate::config::SensorConfig;
use crate::state;

/// Return up to `limit` most-recent score entries for `path`, plus a coarse
/// trend label derived from the returned window (not the full history).
pub async fn handle(
    iii: Arc<III>,
    _config: Arc<SensorConfig>,
    payload: Value,
) -> Result<Value, IIIError> {
    let path = payload
        .get("path")
        .and_then(|v| v.as_str())
        .ok_or_else(|| IIIError::Handler("missing required field: path".to_string()))?;

    let limit = payload
        .get("limit")
        .and_then(|v| v.as_u64())
        .unwrap_or(20) as usize;

    let path_hash = hash_path(path);
    let scope = format!("sensor:history:{path_hash}");

    let scores_val = state::state_get(&iii, &scope, "scores").await;

    // Missing or unreadable history is treated as empty rather than an error.
    let scores: Vec<Value> = scores_val
        .ok()
        .and_then(|v| {
            if v.is_null() {
                None
            } else {
                serde_json::from_value(v).ok()
            }
        })
        .unwrap_or_default();

    let total = scores.len();
    // Entries are stored oldest-first; return newest-first.
    let limited: Vec<Value> = scores.into_iter().rev().take(limit).collect();

    // Trend = newest score minus oldest score within the returned window,
    // with a ±2.0 dead band so tiny fluctuations read as "stable".
    let trend = if limited.len() < 2 {
        "stable"
    } else {
        let first_score = limited
            .last()
            .and_then(|v| v.get("score"))
            .and_then(|v| v.as_f64())
            .unwrap_or(0.0);
        let last_score = limited
            .first()
            .and_then(|v| v.get("score"))
            .and_then(|v| v.as_f64())
            .unwrap_or(0.0);
        let delta = last_score - first_score;
        if delta > 2.0 {
            "improving"
        } else if delta < -2.0 {
            "degrading"
        } else {
            "stable"
        }
    };

    Ok(serde_json::json!({
        "scores": limited,
        "total_entries": total,
        "trend": trend,
    }))
}
diff --git a/sensor/src/functions/mod.rs b/sensor/src/functions/mod.rs
pub mod baseline;
pub mod compare;
pub mod gate;
pub mod history;
pub mod scan;
pub mod score;
diff --git a/sensor/src/functions/scan.rs new file mode 100644 index 0000000..13d1738 --- /dev/null +++ b/sensor/src/functions/scan.rs
use std::sync::Arc;

use
iii_sdk::{IIIError, III};
use serde_json::Value;

use crate::analysis::scan_directory;
use crate::config::SensorConfig;
use crate::state;

/// Walk `path` and return per-file quality metrics.
///
/// An `extensions` array in the payload overrides the configured extension
/// list. The result is also cached under `sensor:latest:{path_hash}`;
/// failures to persist the cache are deliberately ignored (best-effort).
pub async fn handle(
    iii: Arc<III>,
    config: Arc<SensorConfig>,
    payload: Value,
) -> Result<Value, IIIError> {
    let path = payload
        .get("path")
        .and_then(|v| v.as_str())
        .ok_or_else(|| IIIError::Handler("missing required field: path".to_string()))?
        .to_string();

    let extensions = match payload.get("extensions").and_then(|v| v.as_array()) {
        Some(arr) => arr
            .iter()
            .filter_map(|v| v.as_str().map(String::from))
            .collect(),
        None => config.scan_extensions.clone(),
    };

    // Blocking directory walk — keep it off the async runtime.
    let scan_result = tokio::task::spawn_blocking({
        let path = path.clone();
        let max_kb = config.max_file_size_kb;
        move || scan_directory(&path, &extensions, max_kb)
    })
    .await
    .map_err(|e| IIIError::Handler(format!("scan task failed: {e}")))?;

    let result_value = serde_json::to_value(&scan_result)
        .map_err(|e| IIIError::Handler(format!("serialization failed: {e}")))?;

    let path_hash = crate::analysis::hash_path(&path);
    let _ = state::state_set(
        &iii,
        &format!("sensor:latest:{path_hash}"),
        "scan",
        result_value.clone(),
    )
    .await;

    Ok(result_value)
}
diff --git a/sensor/src/functions/score.rs b/sensor/src/functions/score.rs
use std::sync::Arc;

use iii_sdk::{IIIError, III};
use serde_json::Value;

use crate::analysis::{compute_score, hash_path, scan_directory, ScanResult};
use crate::config::SensorConfig;
use crate::state;

/// Aggregate a quality score, either from a caller-supplied `scan_result`
/// or from a fresh scan of `path`. When `path` is present, the score is
/// appended to that path's history (best-effort).
pub async fn handle(
    iii: Arc<III>,
    config: Arc<SensorConfig>,
    payload: Value,
) -> Result<Value, IIIError> {
    let scan_result: ScanResult = if let Some(sr) = payload.get("scan_result") {
        serde_json::from_value(sr.clone())
            .map_err(|e| IIIError::Handler(format!("invalid scan_result: {e}")))?
+ } else { + let path = payload + .get("path") + .and_then(|v| v.as_str()) + .ok_or_else(|| { + IIIError::Handler("missing required field: path or scan_result".to_string()) + })? + .to_string(); + + let extensions = config.scan_extensions.clone(); + let max_kb = config.max_file_size_kb; + tokio::task::spawn_blocking(move || scan_directory(&path, &extensions, max_kb)) + .await + .map_err(|e| IIIError::Handler(format!("scan task failed: {e}")))? + }; + + let score_result = compute_score(&scan_result, &config); + + let result_value = serde_json::to_value(&score_result) + .map_err(|e| IIIError::Handler(format!("serialization failed: {e}")))?; + + if let Some(path) = payload.get("path").and_then(|v| v.as_str()) { + let path_hash = hash_path(path); + + let history_scope = format!("sensor:history:{path_hash}"); + let existing = state::state_get(&iii, &history_scope, "scores").await; + let mut scores: Vec = existing + .ok() + .and_then(|v| { + if v.is_null() { + None + } else { + serde_json::from_value(v).ok() + } + }) + .unwrap_or_default(); + + scores.push(result_value.clone()); + let _ = state::state_set( + &iii, + &history_scope, + "scores", + serde_json::to_value(&scores).unwrap_or_default(), + ) + .await; + } + + Ok(result_value) +} diff --git a/sensor/src/main.rs b/sensor/src/main.rs new file mode 100644 index 0000000..d903251 --- /dev/null +++ b/sensor/src/main.rs @@ -0,0 +1,330 @@ +use anyhow::Result; +use clap::Parser; +use iii_sdk::{register_worker, InitOptions, OtelConfig, RegisterFunctionMessage}; +use std::sync::Arc; + +mod analysis; +mod config; +mod functions; +mod manifest; +mod state; + +#[derive(Parser, Debug)] +#[command(name = "iii-sensor", about = "III engine code quality sensor")] +struct Cli { + #[arg(long, default_value = "./config.yaml")] + config: String, + + #[arg(long, default_value = "ws://127.0.0.1:49134")] + url: String, + + #[arg(long)] + manifest: bool, +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt() + 
.with_env_filter( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info")), + ) + .init(); + + let cli = Cli::parse(); + + if cli.manifest { + let manifest = manifest::build_manifest(); + println!("{}", serde_json::to_string_pretty(&manifest).unwrap()); + return Ok(()); + } + + let sensor_config = match config::load_config(&cli.config) { + Ok(c) => { + tracing::info!( + extensions = ?c.scan_extensions, + max_file_size_kb = c.max_file_size_kb, + "loaded config from {}", + cli.config + ); + c + } + Err(e) => { + tracing::warn!(error = %e, path = %cli.config, "failed to load config, using defaults"); + config::SensorConfig::default() + } + }; + + let cfg = Arc::new(sensor_config); + + tracing::info!(url = %cli.url, "connecting to III engine"); + + let iii = register_worker( + &cli.url, + InitOptions { + otel: Some(OtelConfig::default()), + ..Default::default() + }, + ); + + let iii_arc = Arc::new(iii.clone()); + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::scan".to_string(), + description: Some( + "Scan a directory and compute per-file code quality metrics".to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to scan" }, + "extensions": { + "type": "array", + "items": { "type": "string" }, + "description": "File extensions to include (defaults to config)" + } + }, + "required": ["path"] + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "files": { "type": "array" }, + "summary": { "type": "object" } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + Box::pin(async move { functions::scan::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); 
+ } + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::score".to_string(), + description: Some( + "Compute aggregate quality score from scan results".to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to scan and score" }, + "scan_result": { "type": "object", "description": "Pre-computed scan result" } + } + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "score": { "type": "number" }, + "dimensions": { "type": "object" }, + "grade": { "type": "string" }, + "file_count": { "type": "integer" }, + "timestamp": { "type": "string" } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + Box::pin(async move { functions::score::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); + } + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::baseline".to_string(), + description: Some( + "Save current quality score as a named baseline snapshot".to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to baseline" }, + "label": { "type": "string", "description": "Baseline label (default: 'default')" } + }, + "required": ["path"] + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "baseline_id": { "type": "string" }, + "score": { "type": "number" }, + "dimensions": { "type": "object" }, + "timestamp": { "type": "string" }, + "label": { "type": "string" } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + 
Box::pin(async move { functions::baseline::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); + } + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::compare".to_string(), + description: Some( + "Compare current quality against a saved baseline and detect degradation" + .to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to compare" }, + "baseline_id": { "type": "string", "description": "Specific baseline ID" }, + "label": { "type": "string", "description": "Baseline label to compare against" } + }, + "required": ["path"] + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "degraded": { "type": "boolean" }, + "overall_delta": { "type": "number" }, + "dimension_deltas": { "type": "object" }, + "baseline_score": { "type": "number" }, + "current_score": { "type": "number" }, + "degraded_dimensions": { "type": "array" }, + "timestamp": { "type": "string" } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + Box::pin(async move { functions::compare::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); + } + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::gate".to_string(), + description: Some( + "CI quality gate — pass/fail based on score thresholds and degradation limits" + .to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to gate" }, + "min_score": { "type": "number", "description": "Minimum passing score (default: 60)" }, + "max_degradation_pct": { "type": "number", 
"description": "Max allowed degradation % (default: 10)" } + }, + "required": ["path"] + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "passed": { "type": "boolean" }, + "score": { "type": "number" }, + "grade": { "type": "string" }, + "reason": { "type": "string" }, + "details": { "type": "object" } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + Box::pin(async move { functions::gate::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); + } + + { + let iii_c = iii_arc.clone(); + let cfg_c = cfg.clone(); + iii.register_function(( + RegisterFunctionMessage { + id: "sensor::history".to_string(), + description: Some( + "Retrieve historical quality scores and detect trend direction".to_string(), + ), + request_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "path": { "type": "string", "description": "Directory path to get history for" }, + "limit": { "type": "integer", "description": "Max entries to return (default: 20)" } + }, + "required": ["path"] + })), + response_format: Some(serde_json::json!({ + "type": "object", + "properties": { + "scores": { "type": "array" }, + "total_entries": { "type": "integer" }, + "trend": { "type": "string", "enum": ["improving", "stable", "degrading"] } + } + })), + metadata: None, + invocation: None, + }, + move |payload: serde_json::Value| { + let iii_c = iii_c.clone(); + let cfg_c = cfg_c.clone(); + Box::pin(async move { functions::history::handle(iii_c, cfg_c, payload).await }) + as std::pin::Pin< + Box> + Send>, + > + }, + )); + } + + tracing::info!("all sensor functions registered, waiting for invocations"); + + tokio::signal::ctrl_c().await?; + + tracing::info!("iii-sensor shutting down"); + iii.shutdown_async().await; + + Ok(()) +} diff --git a/sensor/src/manifest.rs b/sensor/src/manifest.rs new file mode 100644 index 
0000000..a402a96 --- /dev/null +++ b/sensor/src/manifest.rs
use serde::Serialize;

/// Module metadata emitted by the `--manifest` CLI flag for tooling/packaging.
#[derive(Serialize)]
pub struct ModuleManifest {
    pub name: String,
    pub version: String,
    pub description: String,
    pub default_config: serde_json::Value,
    pub supported_targets: Vec<String>,
}

/// Build the manifest from compile-time package metadata.
pub fn build_manifest() -> ModuleManifest {
    // Cargo only sets `TARGET` for build scripts, not for normal crate
    // compilation, so `env!("TARGET")` fails to compile without a build.rs
    // forwarding it. Use it when available; otherwise approximate the triple
    // from the standard library's compile-time constants.
    let target = option_env!("TARGET")
        .map(String::from)
        .unwrap_or_else(|| format!("{}-{}", std::env::consts::ARCH, std::env::consts::OS));

    ModuleManifest {
        name: env!("CARGO_PKG_NAME").to_string(),
        version: env!("CARGO_PKG_VERSION").to_string(),
        description: "III engine code quality sensor — scans, scores, baselines, and gates"
            .to_string(),
        default_config: serde_json::json!({
            "scan_extensions": ["rs", "ts", "py", "js", "go"],
            "max_file_size_kb": 512,
            "score_weights": {
                "complexity": 0.25,
                "coupling": 0.25,
                "cohesion": 0.20,
                "size": 0.15,
                "duplication": 0.15
            },
            "thresholds": {
                "degradation_pct": 10.0,
                "min_score": 60.0
            }
        }),
        supported_targets: vec![target],
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_manifest_json_output() {
        let manifest = build_manifest();
        let json = serde_json::to_string_pretty(&manifest).unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
        assert!(parsed.is_object());
        assert_eq!(parsed["name"], "iii-sensor");
        assert_eq!(parsed["version"], env!("CARGO_PKG_VERSION"));
    }

    #[test]
    fn test_manifest_has_required_fields() {
        let manifest = build_manifest();
        let json = serde_json::to_string_pretty(&manifest).unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
        assert!(parsed["default_config"]["scan_extensions"].is_array());
        assert_eq!(parsed["default_config"]["max_file_size_kb"], 512);
        assert_eq!(parsed["default_config"]["score_weights"]["complexity"], 0.25);
        assert_eq!(parsed["default_config"]["thresholds"]["min_score"], 60.0);
        assert!(!manifest.supported_targets.is_empty());
    }
}
diff --git a/sensor/src/state.rs new file mode 100644 index
0000000..2bd1d7f --- /dev/null +++ b/sensor/src/state.rs
use iii_sdk::{IIIError, TriggerRequest, III};
use serde_json::Value;

/// Read `key` within `scope` from the engine's state store.
///
/// Callers treat a JSON `null` result as "key not present" (see the baseline
/// lookups in compare/gate), so this does not error on missing keys.
pub async fn state_get(iii: &III, scope: &str, key: &str) -> Result<Value, IIIError> {
    let payload = serde_json::json!({
        "scope": scope,
        "key": key,
    });
    iii.trigger(TriggerRequest {
        function_id: "state::get".to_string(),
        payload,
        action: None,
        // 5s cap so a stalled engine can't hang request handlers.
        timeout_ms: Some(5000),
    })
    .await
}

/// Write `value` under `key` within `scope` in the engine's state store.
pub async fn state_set(iii: &III, scope: &str, key: &str, value: Value) -> Result<Value, IIIError> {
    let payload = serde_json::json!({
        "scope": scope,
        "key": key,
        "value": value,
    });
    iii.trigger(TriggerRequest {
        function_id: "state::set".to_string(),
        payload,
        action: None,
        // Same 5s cap as state_get, kept consistent deliberately.
        timeout_ms: Some(5000),
    })
    .await
}