Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 18 additions & 7 deletions src/cli/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1647,6 +1647,7 @@ async fn do_analysis(
find_unused_expressions,
find_unused_definitions,
block_until_next_analysis: false,
send_progress_report: true,
});

// Poll for results, showing progress bar if analysis is in progress
Expand Down Expand Up @@ -1698,15 +1699,25 @@ async fn do_analysis(
return;
} else {
// Update progress bar
pb.set_length(result.total_files.max(1) as u64);
pb.set_position(result.files_analyzed as u64);
pb.set_message(format!(
"{} ({}/{} files)",
result.phase, result.files_analyzed, result.total_files
));
let is_analyzing = result.files_analyzed > 0;
if is_analyzing {
pb.set_length(result.total_files_to_analyze.max(1) as u64);
pb.set_position(result.files_analyzed as u64);
pb.set_message(format!(
"{} ({}/{} files)",
result.phase, result.files_analyzed, result.total_files_to_analyze
));
} else {
pb.set_length(result.total_files_to_scan.max(1) as u64);
pb.set_position(result.files_scanned as u64);
pb.set_message(format!(
"{} ({}/{} files)",
result.phase, result.files_scanned, result.total_files_to_scan
));
}

// Wait a bit before polling again (100ms for responsive UI)
std::thread::sleep(Duration::from_millis(100));
tokio::time::sleep(Duration::from_millis(100)).await;
}
}
Ok(Message::Error(err)) => {
Expand Down
5 changes: 1 addition & 4 deletions src/language_server/server_backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,7 @@ impl ServerBasedBackend {
client
.log_message(
MessageType::INFO,
format!(
"Server analysis in progress: {} ({}%)",
response.phase, response.progress_percent
),
format!("Server analysis in progress: {}", response.phase),
)
.await;
// Don't update diagnostics while analysis is in progress
Expand Down
1 change: 1 addition & 0 deletions src/language_server/server_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,7 @@ impl ServerConnection {
find_unused_expressions,
find_unused_definitions,
block_until_next_analysis,
send_progress_report: false,
});

match socket.request(&request).await {
Expand Down
37 changes: 20 additions & 17 deletions src/orchestrator/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use scanner::{ScanFilesResult, scan_files};
use std::fs;
use std::io::{self, Write};
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use unused_symbols::find_unused_definitions;
Expand Down Expand Up @@ -72,7 +73,12 @@ impl Default for SuccessfulScanData {
}
}

use std::sync::atomic::AtomicU32;
/// Shared atomic progress counters for a scan-and-analyze run.
///
/// Each field is an `Arc<AtomicU32>` so the blocking analysis task can update
/// the counters while another thread (e.g. the server request handler) reads
/// them to build progress reports.
pub struct AnalysisProgress {
    // Number of files scanned so far during the scan phase.
    pub files_scanned: Arc<AtomicU32>,
    // Total number of files to scan; presumably 0 until discovery completes — TODO confirm.
    pub total_files_to_scan: Arc<AtomicU32>,
    // Number of files analyzed so far during the analysis phase.
    pub files_analyzed: Arc<AtomicU32>,
    // Total number of files to analyze; set once the analysis set is known
    // (stored from `files_to_analyze.len()` with `Ordering::Relaxed`).
    pub total_files_to_analyze: Arc<AtomicU32>,
}

pub fn scan_and_analyze<F: FnOnce()>(
stubs_dirs: Vec<String>,
Expand Down Expand Up @@ -104,8 +110,6 @@ pub fn scan_and_analyze<F: FnOnce()>(
language_server_changes,
chaos_monkey,
None,
None,
None,
)
}

Expand All @@ -123,15 +127,19 @@ pub fn scan_and_analyze_with_progress<F: FnOnce()>(
previous_analysis_result: Option<AnalysisResult>,
language_server_changes: Option<FxHashMap<String, FileStatus>>,
chaos_monkey: F,
files_scanned: Option<Arc<AtomicU32>>,
total_files_to_scan: Option<Arc<AtomicU32>>,
files_analyzed: Option<Arc<AtomicU32>>,
analysis_progress: Option<AnalysisProgress>,
) -> io::Result<(AnalysisResult, SuccessfulScanData)> {
let mut all_scanned_dirs = stubs_dirs.clone();
all_scanned_dirs.push(config.root_dir.clone());

let file_discovery_and_scanning_now = Instant::now();

let files_scanned = analysis_progress.as_ref().map(|p| p.files_scanned.clone());
let total_files_to_scan = analysis_progress
.as_ref()
.map(|p| p.total_files_to_scan.clone());
let files_analyzed = analysis_progress.as_ref().map(|p| p.files_analyzed.clone());

logger.log_sync("Scanning files");

let ScanFilesResult {
Expand Down Expand Up @@ -267,6 +275,12 @@ pub fn scan_and_analyze_with_progress<F: FnOnce()>(
);

logger.log_sync(&format!("Analyzing {} files", files_to_analyze.len()));
if let Some(total_files_to_analyze) = analysis_progress
.as_ref()
.map(|p| p.total_files_to_analyze.clone())
{
total_files_to_analyze.store(files_to_analyze.len() as u32, Ordering::Relaxed);
}

let mut pure_file_analysis_time = Duration::default();

Expand Down Expand Up @@ -484,17 +498,6 @@ fn emit_duplicate_definition_issues(
issues
}

/// Progress information passed to the progress callback.
#[derive(Debug, Clone)]
pub struct AnalysisProgress {
    /// Human-readable name of the current phase.
    pub phase: String,
    /// Number of files analyzed so far (only meaningful during the "Analyzing" phase).
    pub files_analyzed: u32,
    /// Total number of files to analyze.
    pub total_files: u32,
}

fn get_analysis_ready(
config: &Arc<Config>,
codebase: CodebaseInfo,
Expand Down
18 changes: 12 additions & 6 deletions src/protocol/serialize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -672,6 +672,7 @@ impl Serialize for GetIssuesRequest {
write_bool(buf, self.find_unused_expressions);
write_bool(buf, self.find_unused_definitions);
write_bool(buf, self.block_until_next_analysis);
write_bool(buf, self.send_progress_report);
}
}

Expand All @@ -681,12 +682,14 @@ impl Deserialize for GetIssuesRequest {
let (find_unused_expressions, rest) = read_bool(rest)?;
let (find_unused_definitions, rest) = read_bool(rest)?;
let (block_until_next_analysis, rest) = read_bool(rest)?;
let (send_progress_report, rest) = read_bool(rest)?;
Ok((
Self {
filter,
find_unused_expressions,
find_unused_definitions,
block_until_next_analysis,
send_progress_report,
},
rest,
))
Expand All @@ -702,10 +705,11 @@ impl Serialize for GetIssuesResponse {
for issue in &self.issues {
issue.serialize(buf);
}
write_u32(buf, self.files_scanned);
write_u32(buf, self.total_files_to_scan);
write_u32(buf, self.files_analyzed);
write_u32(buf, self.total_files);
write_u32(buf, self.total_files_to_analyze);
write_string(buf, &self.phase);
write_u8(buf, self.progress_percent);
}
}

Expand All @@ -719,18 +723,20 @@ impl Deserialize for GetIssuesResponse {
issues.push(issue);
rest = r;
}
let (files_scanned, rest) = read_u32(rest)?;
let (total_files_to_scan, rest) = read_u32(rest)?;
let (files_analyzed, rest) = read_u32(rest)?;
let (total_files, rest) = read_u32(rest)?;
let (total_files_to_analyze, rest) = read_u32(rest)?;
let (phase, rest) = read_string(rest)?;
let (progress_percent, rest) = read_u8(rest)?;
Ok((
Self {
analysis_complete,
issues,
files_scanned,
total_files_to_scan,
files_analyzed,
total_files,
total_files_to_analyze,
phase,
progress_percent,
},
rest,
))
Expand Down
10 changes: 7 additions & 3 deletions src/protocol/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,8 @@ pub struct GetIssuesRequest {
pub find_unused_definitions: bool,
/// Whether to wait until the next analysis run.
pub block_until_next_analysis: bool,
/// Whether to send progress reports before the analysis is complete.
pub send_progress_report: bool,
}

/// Response with current issues.
Expand All @@ -245,14 +247,16 @@ pub struct GetIssuesResponse {
pub analysis_complete: bool,
/// Issues found during analysis (may be partial if analysis_complete is false).
pub issues: Vec<ProtocolIssue>,
/// Number of files scanned so far.
pub files_scanned: u32,
/// Total number of files to scan (0 if unknown).
pub total_files_to_scan: u32,
/// Number of files analyzed so far.
pub files_analyzed: u32,
/// Total number of files to analyze (0 if unknown).
pub total_files: u32,
pub total_files_to_analyze: u32,
/// Current analysis phase description.
pub phase: String,
/// Progress percentage (0-100).
pub progress_percent: u8,
}

/// Request for server status.
Expand Down
41 changes: 32 additions & 9 deletions src/server/handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -270,14 +270,33 @@ impl RequestHandler {
req: hakana_protocol::GetIssuesRequest,
) -> Message {
if !req.block_until_next_analysis {
let state = self.state.lock().unwrap();
let analysis_complete = !state.is_analysis_in_progress();
let analysis_result = {
let state = self.state.lock().unwrap();
state
.analysis_data
.as_ref()
.filter(|_| !state.is_analysis_in_progress())
.map(|r| r.clone())
};

if analysis_complete && let Some(analysis_result) = &state.analysis_data {
return self.create_get_issues_response(req, analysis_result);
if let Some(analysis_result) = analysis_result {
return self.create_get_issues_response(req, &analysis_result);
}
}

if req.send_progress_report {
let state = self.state.lock().unwrap();
return Message::GetIssuesResult(GetIssuesResponse {
analysis_complete: false,
issues: vec![],
files_scanned: state.files_scanned(),
total_files_to_scan: state.total_files_to_scan(),
files_analyzed: state.files_analyzed(),
total_files_to_analyze: state.total_files_to_analyze(),
phase: state.phase().to_string(),
});
}

if let Ok(result) = analysis_rx.recv().await
&& let Ok(result) = result.as_ref()
{
Expand All @@ -286,10 +305,11 @@ impl RequestHandler {
Message::GetIssuesResult(GetIssuesResponse {
analysis_complete: false,
issues: vec![],
files_scanned: 0,
total_files_to_scan: 0,
files_analyzed: 0,
total_files: 0,
total_files_to_analyze: 0,
phase: "Complete".to_string(),
progress_percent: 100,
})
}

Expand Down Expand Up @@ -318,13 +338,16 @@ impl RequestHandler {
self.logger
.log_sync(&format!("Returning {} issues", issues.len()));

let state = self.state.lock().unwrap();

return Message::GetIssuesResult(GetIssuesResponse {
analysis_complete: true,
issues,
files_analyzed: 0,
total_files: 0,
files_scanned: state.files_scanned(),
total_files_to_scan: state.total_files_to_scan(),
files_analyzed: state.files_analyzed(),
total_files_to_analyze: state.total_files_to_analyze(),
phase: "Complete".to_string(),
progress_percent: 100,
});
}

Expand Down
41 changes: 37 additions & 4 deletions src/server/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,16 @@ use hakana_analyzer::config::Config;
use hakana_analyzer::custom_hook::CustomHook;
use hakana_code_info::analysis_result::AnalysisResult;
use hakana_logger::Logger;
use hakana_orchestrator::SuccessfulScanData;
use hakana_orchestrator::file::FileStatus;
use hakana_orchestrator::{AnalysisProgress, SuccessfulScanData};
use hakana_protocol::{
ClientConnection, ErrorCode, ErrorResponse, Message, ServerSocket, SocketPath,
};
use hakana_str::Interner;
use rustc_hash::{FxHashMap, FxHashSet};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::atomic::AtomicU32;
use std::sync::{Arc, Mutex, MutexGuard};
use std::time::Instant;

Expand Down Expand Up @@ -212,11 +213,26 @@ impl Server {
let logger = self.logger.clone();
let previous_analysis_data = state.analysis_data.take();

let files_scanned = state.files_scanned.clone();
let total_files_to_scan = state.total_files_to_scan.clone();

let files_analyzed = state.files_analyzed.clone();
let total_files_to_analyze = state.total_files_to_analyze.clone();

let tx = self.analysis_tx.clone();

tokio::task::spawn_blocking(move || {
let result =
run_analysis(&config, &logger, previous_analysis_data, changes).map(&Arc::new);
let result = run_analysis(
&config,
&logger,
previous_analysis_data,
changes,
files_scanned,
total_files_to_scan,
files_analyzed,
total_files_to_analyze,
)
.map(&Arc::new);
let _ = tx.send(result);
});
}
Expand Down Expand Up @@ -360,6 +376,10 @@ fn run_analysis(
logger: &Arc<Logger>,
previous_analysis_data: Option<Arc<(AnalysisResult, SuccessfulScanData)>>,
changes: Option<FxHashMap<String, FileStatus>>,
files_scanned: Arc<AtomicU32>,
total_files_to_scan: Arc<AtomicU32>,
files_analyzed: Arc<AtomicU32>,
total_files_to_analyze: Arc<AtomicU32>,
) -> Result<(AnalysisResult, SuccessfulScanData), String> {
let all_custom_issues: FxHashSet<String> = config
.plugins
Expand Down Expand Up @@ -398,7 +418,19 @@ fn run_analysis(
.map(|d| (Some(d.1.clone()), Some(d.0.clone())))
.unwrap_or((None, None));

hakana_orchestrator::scan_and_analyze(
files_scanned.store(0, std::sync::atomic::Ordering::Relaxed);
total_files_to_scan.store(0, std::sync::atomic::Ordering::Relaxed);
files_analyzed.store(0, std::sync::atomic::Ordering::Relaxed);
total_files_to_analyze.store(0, std::sync::atomic::Ordering::Relaxed);

let progress = AnalysisProgress {
files_scanned,
total_files_to_scan,
files_analyzed,
total_files_to_analyze,
};

hakana_orchestrator::scan_and_analyze_with_progress(
Vec::new(),
None,
None,
Expand All @@ -412,6 +444,7 @@ fn run_analysis(
previous_analysis_result,
changes,
|| {},
Some(progress),
)
.map_err(|e| e.to_string())
}
Loading