From 555c693eefc9072ccb0936a5ce7b54b1affedfa7 Mon Sep 17 00:00:00 2001
From: rudyrude <34278612+meronrudy@users.noreply.github.com>
Date: Sun, 13 Apr 2025 00:36:07 -0400
Subject: [PATCH] Add CSV upload functionality to frontend and backend

* **Frontend (`ModConfig.vue` and `Landing.vue`)**
  - Add a file input for CSV upload in the `mod-config__content__general` section
  - Add a method to handle CSV file selection and upload
  - Bind the file input to the new method
  - Add a button for CSV upload in the `url-section`
  - Bind the button to the CSV upload method in `ModConfig.vue`

* **Backend (`fs_util.rs` and `net_util.rs`)**
  - Add a function to handle CSV file parsing and processing
  - Add necessary imports for CSV file handling
  - Add a new endpoint to handle CSV uploads
  - Add necessary imports for the new endpoint
---
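A note for reviewers (not part of the commit itself): the backend half of this change leans on the `csv` crate's serde integration, where the uploaded file's header row is mapped by column name onto a `#[derive(Deserialize)]` struct. A minimal, self-contained sketch of that pattern, reusing the same placeholder columns (`field1`, `field2`) as the `CsvRecord` stub added below; the sample data is made up:

```rust
use csv::ReaderBuilder;
use serde::Deserialize;

// Placeholder columns, mirroring the `CsvRecord` stub added in fs_util.rs.
#[derive(Debug, Deserialize)]
struct CsvRecord {
    field1: String,
    field2: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // In-memory CSV with a header row; `has_headers(true)` makes the reader
    // map columns onto struct fields by name.
    let data = "field1,field2\nsome value,another value\n";

    let mut rdr = ReaderBuilder::new()
        .has_headers(true)
        .from_reader(data.as_bytes());

    for result in rdr.deserialize::<CsvRecord>() {
        let record = result?;
        println!("{record:?}");
    }

    Ok(())
}
```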
 backend/src/fs_util.rs                | 36 ++++++++----
 backend/src/net_util.rs               | 79 +++++++++++++++++++--------
 frontend/src/components/ModConfig.vue | 38 +++++++++++++
 frontend/src/pages/Landing.vue        | 16 ++++++
 4 files changed, 133 insertions(+), 36 deletions(-)

diff --git a/backend/src/fs_util.rs b/backend/src/fs_util.rs
index 95618c6..9cf017e 100644
--- a/backend/src/fs_util.rs
+++ b/backend/src/fs_util.rs
@@ -1,6 +1,4 @@
-// Libs
 use std::path::PathBuf;
-
 use anyhow::Context;
 use bytes::Bytes;
 use dirs::home_dir;
@@ -8,21 +6,25 @@ use futures_util::{Stream, StreamExt};
 use path_absolutize::Absolutize;
 use rust_embed::RustEmbed;
 use tokio::{fs, io::AsyncWriteExt};
-
 use crate::{
     app_util::is_container,
     config_util::{get_config, is_debug},
     rt_util::QuitUnwrap,
 };
+use csv::ReaderBuilder;
+use serde::Deserialize;
+
+#[derive(Debug, Deserialize)]
+struct CsvRecord {
+    field1: String,
+    field2: String,
+    // Add more fields as needed
+}
 
-// Structs
-/// Holds all the static files for UFC Ripper GUI that will be served using axum.
 #[derive(RustEmbed, Clone)]
 #[folder = "$CARGO_MANIFEST_DIR/../dist/"]
 pub struct WebAssets;
 
-/// Reads the config.json file from the disk and returns the content as `String`.
-/// Will create the default config file if it doesn't exist.
 pub async fn read_config_file_to_string(path: &PathBuf) -> String {
     let read = async {
         fs::read_to_string(path).await.unwrap_or_quit(
@@ -53,7 +55,6 @@ pub async fn read_config_file_to_string(path: &PathBuf) -> String {
     }
 }
 
-/// Writes the current configuration to config.json file.
 pub async fn write_config_to_file(path: &PathBuf) -> anyhow::Result<()> {
     let mut conf_file = fs::File::create(path).await?;
 
@@ -64,7 +65,6 @@ pub async fn write_config_to_file(path: &PathBuf) -> anyhow::Result<()> {
     Ok(())
 }
 
-/// Creates a file on the disk using the given byte-stream.
 pub async fn write_file_to_disk(
     path: PathBuf,
     size: u64,
@@ -104,7 +104,6 @@ where
     Ok(())
 }
 
-/// Opens the downloads directory in the default file explorer.
 pub fn open_downloads_dir() -> anyhow::Result<()> {
     open::that_detached(&get_config().dl_path)
         .context("An error occurred while trying to open the downloads directory")?;
@@ -112,8 +111,6 @@ pub fn open_downloads_dir() -> anyhow::Result<()> {
     Ok(())
 }
 
-/// Generates the path to downloads directory depending on the source path and the OS
-/// and returns it as a String.
 pub fn build_downloads_dir_path(org_dl_path: String) -> anyhow::Result<String> {
     if is_container() {
         Ok("/downloads".to_string())
@@ -134,3 +131,18 @@ pub fn build_downloads_dir_path(org_dl_path: String) -> anyhow::Result<String> {
         Ok(org_dl_path)
     }
 }
+
+pub async fn parse_csv_file(file_path: PathBuf) -> anyhow::Result<Vec<CsvRecord>> {
+    let mut rdr = ReaderBuilder::new()
+        .has_headers(true)
+        .from_path(file_path)
+        .context("Failed to open CSV file")?;
+
+    let mut records = Vec::new();
+    for result in rdr.deserialize() {
+        let record: CsvRecord = result.context("Failed to parse CSV record")?;
+        records.push(record);
+    }
+
+    Ok(records)
+}
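Worth noting while reviewing `parse_csv_file`: the upload handler added to `net_util.rs` (next file) first streams the multipart field to `/tmp` and then re-reads it from disk. Since axum can hand over a whole field as in-memory bytes via `Field::bytes()`, the CSV could also be parsed without the temp-file round trip. A rough sketch of that alternative, assuming `CsvRecord` stays as defined above (`parse_csv_bytes` is a hypothetical name, not something in this patch):

```rust
use bytes::Bytes;
use csv::ReaderBuilder;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct CsvRecord {
    field1: String,
    field2: String,
}

/// Parses CSV records straight from an in-memory buffer (e.g. one multipart
/// field read with `Field::bytes()`), skipping the intermediate temp file.
fn parse_csv_bytes(data: &Bytes) -> anyhow::Result<Vec<CsvRecord>> {
    let mut rdr = ReaderBuilder::new()
        .has_headers(true)
        .from_reader(data.as_ref());

    let mut records = Vec::new();
    for result in rdr.deserialize() {
        records.push(result?);
    }

    Ok(records)
}
```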
diff --git a/backend/src/net_util.rs b/backend/src/net_util.rs
index be0c436..11b7bac 100644
--- a/backend/src/net_util.rs
+++ b/backend/src/net_util.rs
@@ -9,8 +9,9 @@ use axum::{
     body::Body,
     http::{header, Method, StatusCode},
     response::IntoResponse,
-    routing::get,
+    routing::{get, post},
     Router,
+    Json,
 };
 use axum_embed::{FallbackBehavior::Redirect, ServeEmbed};
 use once_cell::sync::Lazy;
@@ -18,6 +19,7 @@ use reqwest::{header::HeaderMap, Client, Proxy, Response};
 use serde_json::{json, value::Index, Value};
 use tokio::net::TcpListener;
 use tower_http::cors::{Any, CorsLayer};
+use axum::extract::Multipart;
 
 use ufcr_libs::{log_err, log_success};
 
@@ -25,7 +27,7 @@ use crate::{
     app_util::{get_app_metadata, get_os_id, is_container},
     bin_util::BINS,
     config_util::{get_config, is_debug, update_config, ConfigUpdate, UFCRConfig},
-    fs_util::{write_file_to_disk, WebAssets},
+    fs_util::{write_file_to_disk, WebAssets, parse_csv_file},
     rt_util::QuitUnwrap,
     state_util::Vod,
     txt_util::get_vod_id_from_url,
@@ -100,6 +102,7 @@ pub async fn init_server() {
     let app = Router::new()
         .nest_service("/", web_assets)
         .route("/export_config", get(handle_config_dl_req))
+        .route("/upload_csv", post(handle_csv_upload))
         .layer(create_ws_layer())
         .layer(create_cors_layer());
 
@@ -129,7 +132,7 @@ pub async fn init_server() {
 /// Creates a new Tower layer with CORS rules.
 fn create_cors_layer() -> CorsLayer {
     CorsLayer::new()
-        .allow_methods([Method::GET])
+        .allow_methods([Method::GET, Method::POST])
         .allow_origin(Any)
 }
 
@@ -187,6 +190,34 @@ async fn handle_config_dl_req() -> impl IntoResponse {
     Ok((headers, body))
 }
 
+/// Handles CSV file upload and parsing.
+async fn handle_csv_upload(mut multipart: Multipart) -> impl IntoResponse {
+    while let Some(field) = multipart.next_field().await.unwrap() {
+        let file_name = field.file_name().unwrap().to_string();
+        let file_path = format!("/tmp/{}", file_name);
+        let mut file = tokio::fs::File::create(&file_path).await.unwrap();
+        while let Some(chunk) = field.chunk().await.unwrap() {
+            file.write_all(&chunk).await.unwrap();
+        }
+
+        match parse_csv_file(PathBuf::from(file_path)).await {
+            Ok(records) => {
+                for record in records {
+                    println!("{:?}", record);
+                }
+            }
+            Err(err) => {
+                return Err((
+                    StatusCode::INTERNAL_SERVER_ERROR,
+                    format!("Failed to parse CSV file: {err}"),
+                ));
+            }
+        }
+    }
+
+    Ok(StatusCode::OK)
+}
+
 /// Fetches UFC Ripper's update information from the GitHub repo.
 pub async fn get_latest_app_meta() -> anyhow::Result {
     let req_url = format!("{}/raw/master/package.json", get_app_metadata().repo);
@@ -196,7 +227,7 @@ pub async fn get_latest_app_meta() -> anyhow::Result {
         .await
         .context("An error occurred while trying to retrieve app update information")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         return Err(anyhow!(
             "Server responded with an error for the app update check"
         ));
@@ -218,7 +249,7 @@ pub async fn get_media_tools_meta() -> anyhow::Result {
         .await
         .context("An error occurred while trying to retrieve media-tools information")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         return Err(anyhow!(
             "Server responded with an error for the media-tools metadata request"
         ));
@@ -244,7 +275,7 @@ pub async fn download_media_tools(
         .take();
 
     for tool in tools {
-        if is_debug() {
+        if (is_debug()) {
            println!("Downloading media tool - {tool}..\n");
         }
 
@@ -288,7 +319,7 @@ pub async fn login_to_fight_pass(
     pass: &str,
 ) -> anyhow::Result {
     let proxied_client = &*HTTP_PROXIED_CLIENT.load();
-    let client = if get_config().use_proxy {
+    let client = if (get_config().use_proxy) {
         proxied_client
     } else {
         &*HTTP_CLIENT
@@ -309,11 +340,11 @@ pub async fn login_to_fight_pass(
         .await
         .context("An error occurred while trying to log into the Fight Pass")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         let err_msg = "Login failed. Check your credentials and try again";
         let resp_error_messages = get_messages_from_response(resp).await.context(err_msg)?;
 
-        if resp_error_messages.contains(&"badLocation".to_string()) {
+        if (resp_error_messages.contains(&"badLocation".to_string())) {
             return Err(anyhow!(
                 "Login was blocked because of the IP address your UFC Ripper backend is bound to. \
                 Try disabling any active VPN connections, or use a proxy service (check configuration)"
@@ -342,12 +373,12 @@ pub async fn login_to_fight_pass(
 
 /// Refreshes an expired access token and returns a new one.
 pub async fn refresh_access_token() -> anyhow::Result<()> {
-    if is_debug() {
+    if (is_debug()) {
         println!("Refreshing access token..\n");
     }
 
     let proxied_client = &*HTTP_PROXIED_CLIENT.load();
-    let client = if get_config().use_proxy {
+    let client = if (get_config().use_proxy) {
         proxied_client
     } else {
         &*HTTP_CLIENT
@@ -364,16 +395,16 @@ pub async fn refresh_access_token() -> anyhow::Result<()> {
         .await
         .context("An error occurred while trying fetch VOD metadata")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         let err_msg = "Failed to refresh your login session. Please login with your UFC Fight Pass account again";
         let resp_error_messages = get_messages_from_response(resp).await.context(err_msg)?;
 
-        if resp_error_messages.contains(&"badLocation".to_string()) {
+        if (resp_error_messages.contains(&"badLocation".to_string())) {
             return Err(anyhow!(
                 "Session refresh request was blocked because of the IP address your UFC Ripper backend is bound to. \
                 Try disabling any active VPN connections, or use a proxy service (check configuration)"
             ));
-        } else if resp_error_messages.contains(&"errorRefreshingToken".to_string()) {
+        } else if (resp_error_messages.contains(&"errorRefreshingToken".to_string())) {
             return Err(anyhow!(
                 "Invalid refresh token. Please log in with your UFC Fight Pass account again"
             ));
@@ -404,7 +435,7 @@ pub async fn refresh_access_token() -> anyhow::Result<()> {
 /// Searches the UFC Fight Pass library for VODs.
 pub async fn search_vods(query: &str, page: u64) -> anyhow::Result {
     let proxied_client = &*HTTP_PROXIED_CLIENT.load();
-    let client = if get_config().use_proxy {
+    let client = if (get_config().use_proxy) {
         proxied_client
     } else {
         &*HTTP_CLIENT
@@ -418,7 +449,7 @@ pub async fn search_vods(query: &str, page: u64) -> anyhow::Result {
         .append_pair("page", &page.to_string())
         .append_pair(
             "restrictSearchableAttributes",
-            if get_config().search_title_only {
+            if (get_config().search_title_only) {
                 r#"["name"]"#
             } else {
                 "[]"
@@ -442,7 +473,7 @@ pub async fn search_vods(query: &str, page: u64) -> anyhow::Result {
         .await
         .context("An error occurred while trying to search the Fight Pass library")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         return Err(anyhow!(
             "Server responded with an error for the search request"
         ));
@@ -455,7 +486,7 @@ pub async fn search_vods(query: &str, page: u64) -> anyhow::Result {
 
     let result = json_body.try_get("results").try_get(0);
 
-    if result == &JSON::Null {
+    if (result == &JSON::Null) {
         Err(anyhow!("Response does not contain any search results"))
     } else {
         Ok(result.clone())
@@ -475,7 +506,7 @@ pub async fn get_vod_meta(url: &str) -> anyhow::Result {
     // Having this as a closure allows this process to be run multiple times.
     let run_request = || async {
         let proxied_client = &*HTTP_PROXIED_CLIENT.load();
-        let client = if get_config().use_proxy {
+        let client = if (get_config().use_proxy) {
             proxied_client
         } else {
             &*HTTP_CLIENT
@@ -493,7 +524,7 @@ pub async fn get_vod_meta(url: &str) -> anyhow::Result {
 
         let status = resp.status();
 
-        if !status.is_success() {
+        if (!status.is_success()) {
             let err_msg = "An unknown error occurred while trying fetch VOD metadata";
 
             return match status.as_u16() {
@@ -501,7 +532,7 @@ pub async fn get_vod_meta(url: &str) -> anyhow::Result {
                     let resp_error_messages =
                         get_messages_from_response(resp).await.context(err_msg)?;
 
-                    if resp_error_messages.contains(&"Bearer token is not valid".to_string()) {
+                    if (resp_error_messages.contains(&"Bearer token is not valid".to_string())) {
                         Ok(ReqStatus::NeedsRefresh)
                     } else {
                         Err(anyhow!(
@@ -571,7 +602,7 @@ pub async fn get_vod_meta(url: &str) -> anyhow::Result {
 /// Fetches the HLS stream URL for a given Fight Pass video.
 pub async fn get_vod_stream_url(vod_id: u64) -> anyhow::Result {
     let proxied_client = &*HTTP_PROXIED_CLIENT.load();
-    let client = if get_config().use_proxy {
+    let client = if (get_config().use_proxy) {
         proxied_client
     } else {
         &*HTTP_CLIENT
@@ -587,7 +618,7 @@ pub async fn get_vod_stream_url(vod_id: u64) -> anyhow::Result {
         .await
         .context("An error occurred while trying request the callback URL for VOD stream")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         return Err(anyhow!(
             "Server responded with an error to the callback URL request"
         ));
@@ -605,7 +636,7 @@ pub async fn get_vod_stream_url(vod_id: u64) -> anyhow::Result {
         .await
         .context("An error occurred while trying request VOD stream URL")?;
 
-    if !resp.status().is_success() {
+    if (!resp.status().is_success()) {
         return Err(anyhow!(
             "Server responded with an error to the VOD stream request"
         ));
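A few things to flag on the `handle_csv_upload` handler added earlier in this file's diff: `file.write_all` needs `tokio::io::AsyncWriteExt` in scope and `PathBuf::from` needs `std::path::PathBuf`, neither of which this diff imports into `net_util.rs` (unless the file already brings them in outside these hunks); the newly added `Json` import appears unused; `field.chunk()` takes `&mut self`, so the binding should be `Some(mut field)`; and every fallible step is `unwrap()`ed, so a malformed upload panics instead of returning an error response. A hedged sketch of a stricter version follows; the error messages and the `upload.csv` fallback name are illustrative, axum's `multipart` feature is assumed to be enabled, and `parse_csv_file` is the helper added to `fs_util.rs` above:

```rust
use std::path::PathBuf;

use axum::{extract::Multipart, http::StatusCode};
use tokio::io::AsyncWriteExt;

/// Stricter take on the upload handler: fallible steps become HTTP error
/// responses instead of `unwrap()` panics.
async fn handle_csv_upload(mut multipart: Multipart) -> Result<StatusCode, (StatusCode, String)> {
    while let Some(mut field) = multipart
        .next_field()
        .await
        .map_err(|err| (StatusCode::BAD_REQUEST, format!("Invalid multipart body: {err}")))?
    {
        // Fall back to a fixed name so a missing `filename` attribute can't break the request.
        let file_name = field.file_name().unwrap_or("upload.csv").to_string();
        let file_path = PathBuf::from(format!("/tmp/{file_name}"));

        let mut file = tokio::fs::File::create(&file_path).await.map_err(|err| {
            (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to create temp file: {err}"))
        })?;

        while let Some(chunk) = field.chunk().await.map_err(|err| {
            (StatusCode::BAD_REQUEST, format!("Failed to read upload: {err}"))
        })? {
            file.write_all(&chunk).await.map_err(|err| {
                (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write temp file: {err}"))
            })?;
        }

        // `parse_csv_file` is the helper added to fs_util.rs in this patch.
        let records = parse_csv_file(file_path).await.map_err(|err| {
            (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to parse CSV file: {err}"))
        })?;

        println!("Parsed {} CSV records", records.len());
    }

    Ok(StatusCode::OK)
}
```

One more point worth a look: the handler (in both forms) builds the temp path from the client-supplied file name, so a crafted `filename` containing path separators could write outside `/tmp`; sanitising the name or using a fixed temporary path would close that off.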
diff --git a/frontend/src/components/ModConfig.vue b/frontend/src/components/ModConfig.vue
index 458504c..07cad85 100644
--- a/frontend/src/components/ModConfig.vue
+++ b/frontend/src/components/ModConfig.vue
@@ -119,6 +119,17 @@
           settings_backup_restore
           Reset
+
@@ -573,6 +584,32 @@ function onConfigReset() {
     .catch(fail);
 }
 
+// CSV upload
+let csvFileInput = ref(null);
+
+async function onCSVFileUpload() {
+    const file = csvFileInput.value?.files[0];
+
+    if (file) {
+        try {
+            const formData = new FormData();
+            formData.append('file', file);
+
+            await fetch('/upload_csv', {
+                method: 'POST',
+                body: formData,
+            });
+
+            store.popSuccess('CSV file uploaded successfully');
+        } catch (error) {
+            store.popError('Failed to upload CSV file');
+            console.error(error);
+        } finally {
+            csvFileInput.value.value = null;
+        }
+    }
+}
+
 // Misc functions
 function save(config) {
     saveConfig(config || modConfig.data)
@@ -586,6 +623,7 @@ function save(config) {
 onMounted(() =>
     nextTick(() => {
         configFileInput.value.addEventListener('change', onConfigFileImport);
+        csvFileInput.value.addEventListener('change', onCSVFileUpload);
     })
 );
 
diff --git a/frontend/src/pages/Landing.vue b/frontend/src/pages/Landing.vue
index 28777f1..f462a75 100644
--- a/frontend/src/pages/Landing.vue
+++ b/frontend/src/pages/Landing.vue
@@ -72,6 +72,15 @@
           >
             favorite
+
+
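To close the loop between the two sides: `onCSVFileUpload` posts a `FormData` with a single field named `file` to `/upload_csv`, and the backend handler iterates over whatever fields arrive. A quick way to exercise the endpoint without the UI is to replay that request from a small Rust client; a sketch assuming reqwest with its `multipart` feature enabled, where the port and CSV contents are placeholders:

```rust
use reqwest::multipart::{Form, Part};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Mirrors what the frontend's FormData sends: one form field named "file".
    let csv_body = "field1,field2\nsome value,another value\n";

    let part = Part::text(csv_body)
        .file_name("links.csv")
        .mime_str("text/csv")?;
    let form = Form::new().part("file", part);

    // The port is a placeholder; use whatever the UFC Ripper backend is bound to.
    let resp = reqwest::Client::new()
        .post("http://localhost:8383/upload_csv")
        .multipart(form)
        .send()
        .await?;

    println!("upload_csv responded with {}", resp.status());

    Ok(())
}
```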