diff --git a/src-tauri/src/launch/helpers/file_validator.rs b/src-tauri/src/launch/helpers/file_validator.rs index b7743ec12..74a182d6c 100644 --- a/src-tauri/src/launch/helpers/file_validator.rs +++ b/src-tauri/src/launch/helpers/file_validator.rs @@ -11,23 +11,180 @@ use crate::resource::models::{ResourceType, SourceType}; use crate::tasks::download::DownloadParam; use crate::tasks::PTaskParam; use crate::utils::fs::validate_sha1; -use futures::future::join_all; +use crate::utils::sys_info::get_concurrent_limit; +use futures::stream::{self, StreamExt}; use semver::Version; use std::collections::{HashMap, HashSet}; use std::io::Cursor; use std::path::{Path, PathBuf}; +use std::sync::OnceLock; +use sysinfo::{CpuRefreshKind, RefreshKind, System}; use tauri::AppHandle; use tokio::fs; +use tokio::sync::Semaphore; +use url::Url; use zip::ZipArchive; +#[derive(Debug, Hash, Eq, PartialEq)] +struct LibraryKey { + path: String, + pack_name: String, + classifier: Option, + extension: String, +} + +pub struct LibraryParts { + pub path: String, + pub pack_name: String, + pub pack_version: String, + pub classifier: Option, + pub extension: String, +} + +fn parse_sem_version(version: &str) -> Version { + Version::parse(version).unwrap_or_else(|_| { + let mut parts = version.split('.').collect::>(); + while parts.len() < 3 { + parts.push("0"); + } + Version::parse(&parts[..3].join(".")).unwrap_or_else(|_| Version::new(0, 1, 0)) + }) +} + +pub fn parse_library_name(name: &str, native: Option) -> SJMCLResult { + let parts: Vec<&str> = name.split('@').collect(); + let file_ext = parts + .get(1) + .map(|s| s.to_string()) + .unwrap_or_else(|| "jar".to_string()); + + let mut name_split: Vec = parts[0].split(':').map(|s| s.to_string()).collect(); + + if name_split.len() < 3 { + return Err(InstanceError::InvalidSourcePath.into()); + } + + if let Some(native) = native { + name_split.push(native); + } + + let path = name_split[0].replace('.', "/"); + let pack_name = name_split[1].clone(); 
+ let pack_version = name_split[2].clone(); + let classifier = name_split.get(3).cloned(); + + Ok(LibraryParts { + path, + pack_name, + pack_version, + classifier, + extension: file_ext, + }) +} + +pub fn convert_library_name_to_path(name: &str, native: Option) -> SJMCLResult { + let LibraryParts { + path, + pack_name, + pack_version, + classifier, + extension: file_ext, + } = parse_library_name(name, native)?; + + let file_name = [ + pack_name.clone(), + pack_version.clone(), + classifier.unwrap_or_default(), + ] + .iter() + .filter(|s| !s.is_empty()) + .map(|s| s.as_str()) + .collect::>() + .join("-") + + "." + + &file_ext; + + Ok(format!("{path}/{pack_name}/{pack_version}/{file_name}")) +} + +async fn validate_file_with_hash( + file_path: PathBuf, + expected_hash: String, + download_url: Url, + check_hash: bool, +) -> SJMCLResult> { + let exists = fs::try_exists(&file_path).await?; + + let needs_download = !exists || { + if check_hash { + let hash = expected_hash.clone(); + let path = file_path.clone(); + let is_valid = tokio::task::spawn_blocking(move || validate_sha1(path, hash).is_ok()).await?; + !is_valid + } else { + false + } + }; + + if needs_download { + return Ok(Some(PTaskParam::Download(DownloadParam { + src: download_url, + dest: file_path, + filename: None, + sha1: Some(expected_hash), + }))); + } + + Ok(None) +} + +async fn validate_files_concurrently( + items: impl IntoIterator, + check_hash: bool, + processor: F, +) -> SJMCLResult> +where + T: Send + Sync + 'static, + F: Fn(T, bool) -> Fut + Send + Sync + Clone + 'static, + Fut: std::future::Future>> + Send, +{ + // 3.0 → Used for downloads & hash validation (lightweight I/O-bound — higher concurrency is safe and efficient) + let max_concurrent = get_concurrent_limit(3.0); + let semaphore = std::sync::Arc::new(Semaphore::new(max_concurrent)); + + let items_vec: Vec = items.into_iter().collect(); + let processor = std::sync::Arc::new(processor); + + let results = stream::iter(items_vec) + 
.map(|item| { + let permit = semaphore.clone().acquire_owned(); + let processor = processor.clone(); + + async move { + let _permit = permit.await; + + processor(item, check_hash).await + } + }) + .buffer_unordered(max_concurrent) + .collect::>() + .await; + + let mut params = Vec::new(); + for r in results { + if let Some(p) = r? { + params.push(p); + } + } + Ok(params) +} + pub fn get_nonnative_library_artifacts(client_info: &McClientInfo) -> Vec { let mut artifacts = HashSet::new(); let feature = FeaturesInfo::default(); + for library in &client_info.libraries { - if !library.is_allowed(&feature).unwrap_or(false) { - continue; - } - if library.natives.is_some() { + if !library.is_allowed(&feature).unwrap_or(false) || library.natives.is_some() { continue; } if let Some(ref downloads) = &library.downloads { @@ -64,102 +221,47 @@ pub fn get_native_library_artifacts(client_info: &McClientInfo) -> Vec SJMCLResult> { - let mut artifacts = Vec::new(); - artifacts.extend(get_native_library_artifacts(client_info)); - artifacts.extend(get_nonnative_library_artifacts(client_info)); - - let futs = artifacts.into_iter().map(move |artifact| async move { - let file_path = library_path.join(&artifact.path); - let exists = fs::try_exists(&file_path).await?; - if exists && (!check_hash || validate_sha1(file_path.clone(), artifact.sha1.clone()).is_ok()) { - Ok(None) - } else if artifact.url.is_empty() { - Err(LaunchError::GameFilesIncomplete.into()) - } else { - let src = convert_url_to_target_source( - &url::Url::parse(&artifact.url)?, - &[ - ResourceType::Libraries, - ResourceType::FabricMaven, - ResourceType::ForgeMaven, - ResourceType::ForgeMavenNew, - ResourceType::NeoforgeMaven, - ], - &source, - )?; - Ok(Some(PTaskParam::Download(DownloadParam { - src, - dest: file_path, - filename: None, - sha1: Some(artifact.sha1.clone()), - }))) - } - }); - - let results: Vec>> = join_all(futs).await; + library_path: &Path, +) -> SJMCLResult> { + let mut libraries = Vec::new(); + let 
feature = FeaturesInfo::default(); - let mut params = Vec::new(); - for r in results { - if let Some(p) = r? { - params.push(p); + for library in &client_info.libraries { + if library.is_allowed(&feature).unwrap_or(false) && library.natives.is_none() { + libraries.push(library.clone()); } } - Ok(params) -} + libraries = merge_library_lists(&libraries, &[]); -pub struct LibraryParts { - pub path: String, - pub pack_name: String, - pub pack_version: String, - pub classifier: Option, - pub extension: String, + libraries + .iter() + .map(|lib| Ok(library_path.join(convert_library_name_to_path(&lib.name, None)?))) + .collect() } -pub fn parse_library_name(name: &str, native: Option) -> SJMCLResult { - let parts: Vec<&str> = name.split('@').collect(); - let file_ext = if parts.len() > 1 { - parts[1].to_string() - } else { - "jar".to_string() - }; - let mut name_split: Vec = parts[0].split(':').map(|s| s.to_string()).collect(); - if name_split.len() < 3 { - Err(InstanceError::InvalidSourcePath.into()) - } else { - if let Some(native) = native { - name_split.push(native); +pub fn get_native_library_paths( + client_info: &McClientInfo, + library_path: &Path, +) -> SJMCLResult> { + let mut result = Vec::new(); + let feature = FeaturesInfo::default(); + for library in &client_info.libraries { + if !library.is_allowed(&feature).unwrap_or(false) || library.natives.is_none() { + continue; } - let pack_name = name_split[1].clone(); - let pack_version = name_split[2].clone(); - let classifier = if name_split.len() > 3 { - Some(name_split[3].clone()) + let native_str = if let Some(native_fn) = Some(&get_natives_string) { + library.natives.as_ref().and_then(native_fn) } else { None }; - let path = name_split[0].replace('.', "/"); - Ok(LibraryParts { - path, - pack_name, - pack_version, - classifier, - extension: file_ext, - }) - } -} -#[derive(Debug, Hash, Eq, PartialEq)] -struct LibraryKey { - path: String, - pack_name: String, - classifier: Option, - extension: String, + let path 
= convert_library_name_to_path(&library.name, native_str)?; + result.push(library_path.join(path)); + } + Ok(result) } // merge two vectors of libraries, remove duplicates by name, keep the one with the highest version. also remove libraries with invalid names @@ -178,14 +280,14 @@ pub fn merge_library_lists( extension: library_parts.extension, }; - let new_version = library_parts.pack_version; + let new_version = &library_parts.pack_version; if let Some(existing_library) = library_map.get(&key) { let existing_version = parse_library_name(&existing_library.name, None) .map(|parts| parts.pack_version) .unwrap_or("0.1.0".to_string()); - if parse_sem_version(&new_version) > parse_sem_version(&existing_version) { + if parse_sem_version(new_version) > parse_sem_version(&existing_version) { library_map.insert(key, library.clone()); } } else { @@ -197,83 +299,51 @@ pub fn merge_library_lists( library_map.into_values().collect() } -fn parse_sem_version(version: &str) -> Version { - Version::parse(version).unwrap_or({ - let mut parts = version.split('.').collect::>(); - while parts.len() < 3 { - parts.push("0"); - } - Version::parse(&parts[..3].join(".")).unwrap_or(Version::new(0, 1, 0)) - }) -} +pub async fn get_invalid_library_files( + source: SourceType, + library_path: &Path, + client_info: &McClientInfo, + check_hash: bool, +) -> SJMCLResult> { + let mut artifacts = Vec::new(); + artifacts.extend(get_native_library_artifacts(client_info)); + artifacts.extend(get_nonnative_library_artifacts(client_info)); -pub fn convert_library_name_to_path(name: &str, native: Option) -> SJMCLResult { - let LibraryParts { - path, - pack_name, - pack_version, - classifier, - extension: file_ext, - } = parse_library_name(name, native)?; + let library_path = library_path.to_path_buf(); + let source = source.clone(); - let file_name = [ - pack_name.clone(), - pack_version.clone(), - classifier.unwrap_or_default(), - ] - .iter() - .filter(|s| !s.is_empty()) - .map(|s| s.as_str()) - 
.collect::>() - .join("-") - + "." - + &file_ext; - Ok(format!("{path}/{pack_name}/{pack_version}/{file_name}")) -} + validate_files_concurrently( + artifacts.into_iter(), + check_hash, + move |artifact, check_hash| { + let source = source.clone(); + let library_path = library_path.clone(); -pub fn get_nonnative_library_paths( - client_info: &McClientInfo, - library_path: &Path, -) -> SJMCLResult> { - let mut libraries = Vec::new(); - let feature = FeaturesInfo::default(); - for library in &client_info.libraries { - if !library.is_allowed(&feature).unwrap_or(false) { - continue; - } - if library.natives.is_some() { - continue; - } - libraries.push(library.clone()); - } - libraries = merge_library_lists(&libraries, &[]); // remove duplicates to prevent launch errors - let mut result = Vec::new(); - for library in libraries { - result.push(library_path.join(convert_library_name_to_path(&library.name, None)?)); - } - Ok(result) -} + async move { + if artifact.url.is_empty() { + return Err(LaunchError::GameFilesIncomplete.into()); + } -pub fn get_native_library_paths( - client_info: &McClientInfo, - library_path: &Path, -) -> SJMCLResult> { - let mut result = Vec::new(); - let feature = FeaturesInfo::default(); - for library in &client_info.libraries { - if !library.is_allowed(&feature).unwrap_or(false) { - continue; - } - if let Some(natives) = &library.natives { - if let Some(native) = get_natives_string(natives) { - let path = convert_library_name_to_path(&library.name, Some(native))?; - result.push(library_path.join(path)); - } else { - println!("natives is None"); + let file_path = library_path.join(&artifact.path); + let url = Url::parse(&artifact.url)?; + + let download_url = convert_url_to_target_source( + &url, + &[ + ResourceType::Libraries, + ResourceType::FabricMaven, + ResourceType::ForgeMaven, + ResourceType::ForgeMavenNew, + ResourceType::NeoforgeMaven, + ], + &source, + )?; + + validate_file_with_hash(file_path, artifact.sha1, download_url, 
check_hash).await } - } - } - Ok(result) + }, + ) + .await } pub async fn extract_native_libraries( @@ -284,22 +354,26 @@ pub async fn extract_native_libraries( if !natives_dir.exists() { fs::create_dir(natives_dir).await?; } + + // 1.5 → Used for native library extraction (mixed I/O + CPU, heavier work — keep conservative to avoid blocking) + let max_concurrent = get_concurrent_limit(1.5); + let native_libraries = get_native_library_paths(client_info, library_path)?; - let tasks: Vec>> = native_libraries - .into_iter() + + let results: Vec<_> = stream::iter(native_libraries) .map(|library_path| { let patches_dir_clone = natives_dir.clone(); - tokio::spawn(async move { - let file = Cursor::new(fs::read(library_path).await?); + async move { + let file = Cursor::new(fs::read(&library_path).await?); let mut jar = ZipArchive::new(file)?; jar.extract(&patches_dir_clone)?; - Ok(()) - }) + Ok::<_, crate::error::SJMCLError>(()) + } }) - .collect(); - - let results = futures::future::join_all(tasks).await; + .buffer_unordered(max_concurrent) + .collect::>() + .await; for result in results { if let Err(e) = result { @@ -319,42 +393,28 @@ pub async fn get_invalid_assets( check_hash: bool, ) -> SJMCLResult> { let assets_download_api = get_download_api(source, ResourceType::Assets)?; - let asset_index_path = asset_path.join(format!("indexes/{}.json", client_info.asset_index.id)); let asset_index = load_asset_index(app, &asset_index_path, &client_info.asset_index.url).await?; - let futs = asset_index.objects.into_values().map(|item| { - let assets_download_api = assets_download_api.clone(); - let base_path = asset_path.to_path_buf(); + let base_path = asset_path.to_path_buf(); - async move { - let path_in_repo = format!("{}/{}", &item.hash[..2], item.hash); - let dest = base_path.join(format!("objects/{}", path_in_repo)); - let exists = fs::try_exists(&dest).await?; + validate_files_concurrently( + asset_index.objects.into_values(), + check_hash, + move |item, check_hash| { + 
let assets_download_api = assets_download_api.clone(); + let base_path = base_path.clone(); - if exists && (!check_hash || validate_sha1(dest.clone(), item.hash.clone()).is_ok()) { - Ok::, crate::error::SJMCLError>(None) - } else { - let src = assets_download_api + async move { + let path_in_repo = format!("{}/{}", &item.hash[..2], item.hash); + let dest = base_path.join(format!("objects/{}", path_in_repo)); + let download_url = assets_download_api .join(&path_in_repo) .map_err(crate::error::SJMCLError::from)?; - Ok(Some(PTaskParam::Download(DownloadParam { - src, - dest, - filename: None, - sha1: Some(item.hash.clone()), - }))) - } - } - }); - - let results: Vec>> = join_all(futs).await; - let mut params = Vec::new(); - for r in results { - if let Some(p) = r? { - params.push(p); - } - } - Ok(params) + validate_file_with_hash(dest, item.hash, download_url, check_hash).await + } + }, + ) + .await } diff --git a/src-tauri/src/utils/sys_info.rs b/src-tauri/src/utils/sys_info.rs index 4a4a25bfb..7553bfbaf 100644 --- a/src-tauri/src/utils/sys_info.rs +++ b/src-tauri/src/utils/sys_info.rs @@ -3,7 +3,8 @@ use crate::launcher_config::models::MemoryInfo; use serde_json::json; use std::net::{SocketAddr, TcpListener}; use std::path::PathBuf; -use sysinfo::{Disk, Disks}; +use std::sync::OnceLock; +use sysinfo::{CpuRefreshKind, Disk, Disks, RefreshKind, System}; use systemstat::{saturating_sub_bytes, Platform}; use tauri_plugin_http::reqwest; use tauri_plugin_os::locale; @@ -151,3 +152,21 @@ pub fn find_free_port(start_port: Option) -> SJMCLResult { log::error!("No free port found."); Err(SJMCLError("No free port found".to_string())) } + +pub fn get_concurrent_limit(multiplier: f64) -> usize { + static CONCURRENT_LIMIT: OnceLock = OnceLock::new(); + + *CONCURRENT_LIMIT.get_or_init(|| { + let mut sys = + System::new_with_specifics(RefreshKind::nothing().with_cpu(CpuRefreshKind::everything())); + std::thread::sleep(sysinfo::MINIMUM_CPU_UPDATE_INTERVAL); + 
sys.refresh_cpu_usage(); + sys.cpus().len().max(1) + }); + + // NOTE(review): cache only the detected CPU count in the OnceLock and recompute + // the limit on every call, so each call site's `multiplier` (3.0 for downloads, + // 1.5 for native extraction) actually takes effect — previously the first + // caller's computed limit was frozen and reused for all later callers, + // silently ignoring `multiplier`. + let cpu_count = *CONCURRENT_LIMIT.get().expect("initialized above") as f64; + + ((cpu_count * multiplier).round() as usize).clamp(8, 32) +}