diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 041e82c16..ced3e1ba8 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -54,8 +54,10 @@ jobs: DEPLOYMENT_KERNEL_ADDRESS: ${{ inputs.kernel_address }} DEPLOY_OS: ${{ inputs.deploy_os }} DEPLOY_CONTRACTS: ${{ inputs.contracts }} + ENV: ${{ inputs.schema_parser }} SLACK_WEBHOOK_URL: "${{ secrets.DEPLOY_SLACK_WEBHOOK_URL }}" TEST_MNEMONIC: "${{ secrets.DEPLOY_TEST_MNEMONIC }}" + BLOB_READ_WRITE_TOKEN: "${{ secrets.VERCEL_BLOB_READ_WRITE_TOKEN }}" RUST_LOG: info steps: - uses: actions/checkout@v4 diff --git a/Cargo.lock b/Cargo.lock index d65bcead3..c89cd5e5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -430,11 +430,15 @@ dependencies = [ "cw-orch-daemon 0.29.2", "dotenv", "env_logger", + "hex", "log", "reqwest 0.12.22", "serde", "serde_json", + "sha2 0.10.9", "thiserror 2.0.12", + "tokio", + "vercel_blob", ] [[package]] @@ -1930,6 +1934,7 @@ dependencies = [ "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-link", ] @@ -7318,6 +7323,21 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vercel_blob" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9eddfd07ce156c02d0a7223b1282c279126143a133c0debb98d198b212bd93" +dependencies = [ + "async-trait", + "bytes", + "chrono 0.4.41", + "once_cell", + "reqwest 0.11.27", + "serde", + "thiserror 1.0.69", +] + [[package]] name = "version_check" version = "0.9.5" diff --git a/packages/deploy/Cargo.toml b/packages/deploy/Cargo.toml index 14e0c655b..c40fa848a 100644 --- a/packages/deploy/Cargo.toml +++ b/packages/deploy/Cargo.toml @@ -20,7 +20,11 @@ log = "0.4" dotenv = "0.15.0" reqwest = { version = "0.12", features = ["json", "blocking"] } serde_json = "1.0" -chrono = "0.4" +chrono = { version = "0.4", features = 
["serde"] } +hex = "0.4" +sha2 = "0.10" +tokio = { version = "1.0", features = ["rt", "fs"] } +vercel_blob = "0.1.0" # OS Contracts andromeda-kernel = { path = "../../contracts/os/andromeda-kernel" } diff --git a/packages/deploy/src/error.rs b/packages/deploy/src/error.rs index a67b2d5dd..06c94b828 100644 --- a/packages/deploy/src/error.rs +++ b/packages/deploy/src/error.rs @@ -5,4 +5,6 @@ use thiserror::Error; pub enum DeployError { #[error("{0}")] CwOrchError(#[from] CwOrchError), + #[error("{0}")] + Generic(String), } diff --git a/packages/deploy/src/lib.rs b/packages/deploy/src/lib.rs index 7eceeca0f..4a64bd4ae 100644 --- a/packages/deploy/src/lib.rs +++ b/packages/deploy/src/lib.rs @@ -8,3 +8,4 @@ pub mod os; pub mod report; pub mod slack; pub mod validate; +pub mod vercel; diff --git a/packages/deploy/src/main.rs b/packages/deploy/src/main.rs index 2fff279f5..278d77979 100644 --- a/packages/deploy/src/main.rs +++ b/packages/deploy/src/main.rs @@ -2,7 +2,9 @@ use andromeda_deploy::build; use andromeda_deploy::report::DeploymentReport; use andromeda_deploy::slack::SlackNotification; use andromeda_deploy::validate; +use andromeda_deploy::vercel; use std::env; +use std::fs; use andromeda_deploy::adodb; use andromeda_deploy::os; @@ -11,12 +13,63 @@ use dotenv::dotenv; fn main() { env_logger::init(); dotenv().ok(); + let kernel_address = env::var("DEPLOYMENT_KERNEL_ADDRESS").ok().unwrap(); - validate::run(); + let chain = dotenv::var("DEPLOYMENT_CHAIN").expect("DEPLOYMENT_CHAIN must be set"); + let mut should_upload_after_deploy = false; - build::run(); + let blobs = { + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(andromeda_deploy::vercel::list_commit_blobs()) + }; + + match blobs { + Ok(blobs) if !blobs.is_empty() => { + log::info!("Found prebuilt artifacts on Vercel for this commit. 
Restoring..."); + { + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(vercel::download_blobs_to_artifacts(&blobs)) + .unwrap(); + } + + let contracts_to_deploy = fs::read_dir("artifacts") + .unwrap() + .map(|file| file.unwrap().file_name().to_str().unwrap().to_string()) + .collect::>(); + + let adodb_res = adodb::deploy( + chain.clone(), + kernel_address.clone(), + Some(contracts_to_deploy), + ); + if let Err(e) = adodb_res { + println!("Error deploying ADODB: {}", e); + SlackNotification::ADODeploymentFailed(chain.clone(), e) + .send() + .unwrap(); + std::process::exit(1); + } + + let deployed_contracts = adodb_res.unwrap(); + DeploymentReport { + chain_id: chain.clone(), + contracts: deployed_contracts, + kernel_address: kernel_address.clone(), + } + .write_to_json() + .unwrap(); + } + Ok(_) => { + validate::run(); + build::run(); + should_upload_after_deploy = true; + } + Err(e) => { + println!("Failed to list blobs: {}", e); + std::process::exit(1); + } + } - let chain = env::var("DEPLOYMENT_CHAIN").expect("DEPLOYMENT_CHAIN must be set"); let mut kernel_address = env::var("DEPLOYMENT_KERNEL_ADDRESS").ok(); // Send start notification @@ -75,8 +128,21 @@ fn main() { DeploymentReport { chain_id: chain.clone(), contracts: deployed_contracts, - kernel_address, + kernel_address: kernel_address.clone(), } .write_to_json() .unwrap(); + + // Upload artifacts only if we built them in this run (cache miss path) + if should_upload_after_deploy { + build::build_all_contracts(); + let upload_res = { + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(vercel::upload_wasm_folder("artifacts")) + }; + if let Err(e) = upload_res { + println!("Error uploading artifacts to Vercel Blob: {}", e); + // Non-fatal + } + } } diff --git a/packages/deploy/src/slack.rs b/packages/deploy/src/slack.rs index ed7d7ca18..ea9e893b2 100644 --- a/packages/deploy/src/slack.rs +++ b/packages/deploy/src/slack.rs @@ -35,6 +35,7 @@ pub enum SlackNotification { 
DeploymentStarted(String, Option<String>), DeploymentCompleted(String, Option<String>), DeploymentFailed(String, Option<String>, DeployError), + DeploymentSkipped(String, String), ADODeploymentStarted(String, Vec<String>), ADODeploymentCompleted(String, Vec<(String, String, u64)>), ADODeploymentFailed(String, DeployError), @@ -62,6 +63,10 @@ impl std::fmt::Display for SlackNotification { let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); write!(f, "❌ *Deployment Failed*\n```\n| Chain | {} |\n| Time | {} |\n| Kernel Address | {} |\n| Error | {} |```", chain, timestamp, kernel_address.as_deref().unwrap_or("Not provided"), error) } + SlackNotification::DeploymentSkipped(chain, schema_parser_env) => { + let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); + write!(f, "⏭️ *Deployment Skipped*\n```\n| Chain | {} |\n| Environment | {} |\n| Time | {} |\n| Reason | No changes detected |```", chain, schema_parser_env, timestamp) + } SlackNotification::ADODeploymentStarted(chain, contracts) => { write!(f, "🚀 *ADO Library Deployment Started*\n```\n| Chain | {} |\n| Contracts | {} |```", chain, contracts.join(", ")) } diff --git a/packages/deploy/src/validate.rs b/packages/deploy/src/validate.rs index 2441d7a46..ecaf5cc28 100644 --- a/packages/deploy/src/validate.rs +++ b/packages/deploy/src/validate.rs @@ -214,7 +214,7 @@ fn filter_deployed_contracts() { env::set_var("DEPLOYMENT_SKIP_BUILD", "true"); } else if deployable_contracts.is_empty() { // If we don't have any contracts to deploy and we don't want to deploy the OS we should panic as there is nothing to do - panic!("No contracts to deploy - all specified contracts are already deployed"); + log::warn!("No contracts to deploy - all specified contracts are already deployed"); } // Check if we are deploying all contracts diff --git a/packages/deploy/src/vercel.rs b/packages/deploy/src/vercel.rs new file mode 100644 index 000000000..81df46fe1 --- /dev/null +++ b/packages/deploy/src/vercel.rs @@ -0,0 +1,156 @@ +use 
std::fs; +use std::path::Path; +use vercel_blob::{ + self, + client::{ + DownloadCommandOptions, ListBlobResultBlob, ListCommandOptions, PutCommandOptions, + VercelBlobApi, + }, +}; + +/// List all blobs for the current commit by using a prefix of `<commit_hash>/`. +pub async fn list_commit_blobs() -> Result<Vec<ListBlobResultBlob>, Box<dyn std::error::Error>> { + let commit_hash_bytes = std::process::Command::new("git") + .arg("rev-parse") + .arg("HEAD") + .output()?; + let commit_hash = String::from_utf8(commit_hash_bytes.stdout)?; + let commit_hash = commit_hash.trim(); + + let client = vercel_blob::client::VercelBlobClient::new(); + let command_options = ListCommandOptions { + limit: None, + prefix: Some(format!("{}/", commit_hash)), + cursor: None, + }; + + let list_of_blobs = client.list(command_options).await?; + Ok(list_of_blobs.blobs) +} + +/// Download all provided blobs into the local `artifacts/` directory. +pub async fn download_blobs_to_artifacts( + blobs: &[ListBlobResultBlob], +) -> Result<(), Box<dyn std::error::Error>> { + if blobs.is_empty() { + return Ok(()); + } + + fs::create_dir_all("artifacts")?; + + let client = vercel_blob::client::VercelBlobClient::new(); + for blob in blobs { + let download_options = DownloadCommandOptions { byte_range: None }; + let bytes = client.download(&blob.url, download_options).await?; + let filename = Path::new(&blob.pathname) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("artifact.bin"); + let out_path = Path::new("artifacts").join(filename); + fs::write(out_path, bytes)?; + } + + Ok(()) +} + +pub async fn download_blob(blob_url: &str) -> Result<(), Box<dyn std::error::Error>> { + let client = vercel_blob::client::VercelBlobClient::new(); + + let download_options = DownloadCommandOptions { byte_range: None }; + + client.download(blob_url, download_options).await?; + + Ok(()) +} + +pub fn copy_files(source_dir: &str, dest_dir: &str) -> Result<(), Box<dyn std::error::Error>> { + let source_path = Path::new(source_dir); + let dest_path = Path::new(dest_dir); + + // Create destination directory if it doesn't exist + 
fs::create_dir_all(dest_path)?; + + // Read the source directory + let entries = fs::read_dir(source_path)?; + + for entry in entries { + let entry = entry?; + let file_type = entry.file_type()?; + + if file_type.is_file() { + let source_file = entry.path(); + let file_name = source_file.file_name().unwrap(); + let dest_file = dest_path.join(file_name); + + // Copy the file + fs::copy(&source_file, &dest_file)?; + println!("Copied: {:?} -> {:?}", source_file, dest_file); + } + } + + Ok(()) +} + +pub async fn upload_blob( + blob_path: &str, + bytes: Vec<u8>, + content_type: Option<&str>, +) -> Result<(), Box<dyn std::error::Error>> { + let client = vercel_blob::client::VercelBlobClient::new(); + let put_options = PutCommandOptions { + add_random_suffix: false, + cache_control_max_age: None, + content_type: content_type.map(|s| s.to_string()), + }; + client.put(blob_path, bytes, put_options).await?; + Ok(()) +} + +/// Upload all `.wasm` files from `folder_path` to Vercel Blob under +/// `<commit_hash>/<filename>`. +pub async fn upload_wasm_folder(folder_path: &str) -> Result<(), Box<dyn std::error::Error>> { + // Determine current commit hash + let commit_hash_bytes = std::process::Command::new("git") + .arg("rev-parse") + .arg("HEAD") + .output()?; + let commit_hash = String::from_utf8(commit_hash_bytes.stdout)?; + let commit_hash = commit_hash.trim(); + + let dir_iter = fs::read_dir(folder_path)?; + for entry in dir_iter { + let entry = entry?; + let file_type = entry.file_type()?; + if !file_type.is_file() { + continue; + } + + let path = entry.path(); + if path + .extension() + .and_then(|e| e.to_str()) + .map(|e| e.eq_ignore_ascii_case("wasm")) + .unwrap_or(false) + { + let file_name = match path.file_name().and_then(|n| n.to_str()) { + Some(name) => name.to_string(), + None => continue, + }; + + let blob_path = format!("{}/{}", commit_hash, file_name); + let bytes = fs::read(&path)?; + // Upload with deterministic path and proper content type for WASM + upload_blob(&blob_path, bytes, Some("application/wasm")).await?; + 
log::info!("Uploaded {} to {}", path.display(), blob_path); + } else if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) { + if file_name.eq_ignore_ascii_case("version_map.json") { + let blob_path = format!("{}/{}", commit_hash, file_name); + let bytes = fs::read(&path)?; + upload_blob(&blob_path, bytes, Some("application/json")).await?; + log::info!("Uploaded {} to {}", path.display(), blob_path); + } + } + } + + Ok(()) +}