Skip to content
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,10 @@ jobs:
DEPLOYMENT_KERNEL_ADDRESS: ${{ inputs.kernel_address }}
DEPLOY_OS: ${{ inputs.deploy_os }}
DEPLOY_CONTRACTS: ${{ inputs.contracts }}
ENV: ${{ inputs.schema_parser }}
SLACK_WEBHOOK_URL: "${{ secrets.DEPLOY_SLACK_WEBHOOK_URL }}"
TEST_MNEMONIC: "${{ secrets.DEPLOY_TEST_MNEMONIC }}"
VERCEL_BLOB_READ_WRITE_TOKEN: "${{ secrets.VERCEL_BLOB_READ_WRITE_TOKEN }}"
RUST_LOG: info
steps:
- uses: actions/checkout@v4
Expand Down
20 changes: 20 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 5 additions & 1 deletion packages/deploy/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,11 @@ log = "0.4"
dotenv = "0.15.0"
reqwest = { version = "0.12", features = ["json", "blocking"] }
serde_json = "1.0"
chrono = "0.4"
chrono = { version = "0.4", features = ["serde"] }
hex = "0.4"
sha2 = "0.10"
tokio = { version = "1.0", features = ["rt", "fs"] }
vercel_blob = "0.1.0"

# OS Contracts
andromeda-kernel = { path = "../../contracts/os/andromeda-kernel" }
Expand Down
2 changes: 2 additions & 0 deletions packages/deploy/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,6 @@ use thiserror::Error;
pub enum DeployError {
#[error("{0}")]
CwOrchError(#[from] CwOrchError),
#[error("{0}")]
Generic(String),
}
1 change: 1 addition & 0 deletions packages/deploy/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ pub mod os;
pub mod report;
pub mod slack;
pub mod validate;
pub mod vercel;
65 changes: 60 additions & 5 deletions packages/deploy/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,68 @@ use andromeda_deploy::build;
use andromeda_deploy::report::DeploymentReport;
use andromeda_deploy::slack::SlackNotification;
use andromeda_deploy::validate;
use andromeda_deploy::vercel;
use std::env;
use std::fs;

use andromeda_deploy::adodb;
use andromeda_deploy::os;
use dotenv::dotenv;

fn main() {
#[tokio::main]
async fn main() {
env_logger::init();
dotenv().ok();
let kernel_address = env::var("DEPLOYMENT_KERNEL_ADDRESS").ok().unwrap();

validate::run();
let chain = dotenv::var("DEPLOYMENT_CHAIN").expect("DEPLOYMENT_CHAIN must be set");
let mut should_upload_after_deploy = false;

build::run();
let blobs = vercel::list_commit_blobs().await;

match blobs {
Ok(blobs) if !blobs.is_empty() => {
log::info!("Found prebuilt artifacts on Vercel for this commit. Restoring...");
vercel::download_blobs_to_artifacts(&blobs).await.unwrap();

let contracts_to_deploy = fs::read_dir("artifacts")
.unwrap()
.map(|file| file.unwrap().file_name().to_str().unwrap().to_string())
.collect::<Vec<String>>();

let adodb_res = adodb::deploy(
chain.clone(),
kernel_address.clone(),
Some(contracts_to_deploy),
);
if let Err(e) = adodb_res {
println!("Error deploying ADODB: {}", e);
SlackNotification::ADODeploymentFailed(chain.clone(), e)
.send()
.unwrap();
std::process::exit(1);
}

let deployed_contracts = adodb_res.unwrap();
DeploymentReport {
chain_id: chain.clone(),
contracts: deployed_contracts,
kernel_address: kernel_address.clone(),
}
.write_to_json()
.unwrap();
}
Ok(_) => {
validate::run();
build::run();
should_upload_after_deploy = true;
}
Err(e) => {
println!("Failed to list blobs: {}", e);
std::process::exit(1);
}
}

let chain = env::var("DEPLOYMENT_CHAIN").expect("DEPLOYMENT_CHAIN must be set");
let mut kernel_address = env::var("DEPLOYMENT_KERNEL_ADDRESS").ok();

// Send start notification
Expand Down Expand Up @@ -75,8 +122,16 @@ fn main() {
DeploymentReport {
chain_id: chain.clone(),
contracts: deployed_contracts,
kernel_address,
kernel_address: kernel_address.clone(),
}
.write_to_json()
.unwrap();

// Upload artifacts only if we built them in this run (cache miss path)
if should_upload_after_deploy {
if let Err(e) = vercel::upload_wasm_folder("artifacts").await {
println!("Error uploading artifacts to Vercel Blob: {}", e);
// Non-fatal
}
}
}
5 changes: 5 additions & 0 deletions packages/deploy/src/slack.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ pub enum SlackNotification {
DeploymentStarted(String, Option<String>),
DeploymentCompleted(String, Option<String>),
DeploymentFailed(String, Option<String>, DeployError),
DeploymentSkipped(String, String),
ADODeploymentStarted(String, Vec<String>),
ADODeploymentCompleted(String, Vec<(String, String, u64)>),
ADODeploymentFailed(String, DeployError),
Expand Down Expand Up @@ -62,6 +63,10 @@ impl std::fmt::Display for SlackNotification {
let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
write!(f, "❌ *Deployment Failed*\n```\n| Chain | {} |\n| Time | {} |\n| Kernel Address | {} |\n| Error | {} |```", chain, timestamp, kernel_address.as_deref().unwrap_or("Not provided"), error)
}
SlackNotification::DeploymentSkipped(chain, schema_parser_env) => {
let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
write!(f, "⏭️ *Deployment Skipped*\n```\n| Chain | {} |\n| Environment | {} |\n| Time | {} |\n| Reason | No changes detected |```", chain, schema_parser_env, timestamp)
}
SlackNotification::ADODeploymentStarted(chain, contracts) => {
write!(f, "🚀 *ADO Library Deployment Started*\n```\n| Chain | {} |\n| Contracts | {} |```", chain, contracts.join(", "))
}
Expand Down
156 changes: 156 additions & 0 deletions packages/deploy/src/vercel.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
use std::fs;
use std::path::Path;
use vercel_blob::{
self,
client::{
DownloadCommandOptions, ListBlobResultBlob, ListCommandOptions, PutCommandOptions,
VercelBlobApi,
},
};

/// List all blobs for the current commit by using a prefix of `<commit_hash>/`.
/// List all blobs for the current commit by using a prefix of `<commit_hash>/`.
///
/// The commit hash is resolved via `git rev-parse HEAD`, so this must run
/// inside a git checkout. Returns the (possibly empty) set of blobs stored
/// under that prefix on Vercel Blob.
///
/// # Errors
/// Fails if the `git` process cannot be spawned or exits non-zero, if its
/// output is not valid UTF-8, or if the Vercel Blob list call fails.
pub async fn list_commit_blobs() -> Result<Vec<ListBlobResultBlob>, Box<dyn std::error::Error>> {
    let output = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()?;
    // Without this check a failed git invocation (e.g. not a repo) yields an
    // empty hash and we would silently list under the bogus prefix "/".
    if !output.status.success() {
        return Err("`git rev-parse HEAD` exited with a non-zero status".into());
    }
    let commit_hash = String::from_utf8(output.stdout)?;
    let commit_hash = commit_hash.trim();

    let client = vercel_blob::client::VercelBlobClient::new();
    let command_options = ListCommandOptions {
        limit: None,
        prefix: Some(format!("{}/", commit_hash)),
        cursor: None,
    };

    let list_of_blobs = client.list(command_options).await?;
    Ok(list_of_blobs.blobs)
}

/// Download all provided blobs into the local `artifacts/` directory.
/// Download all provided blobs into the local `artifacts/` directory.
///
/// Each blob is written under `artifacts/` using the final path component of
/// its `pathname`, falling back to `artifact.bin` when one cannot be derived.
/// Does nothing when `blobs` is empty.
///
/// NOTE(review): blobs whose pathnames share a final component overwrite one
/// another in `artifacts/` — confirm pathnames are unique per commit prefix.
///
/// # Errors
/// Fails if the directory cannot be created, any download fails, or a file
/// cannot be written.
pub async fn download_blobs_to_artifacts(
    blobs: &[ListBlobResultBlob],
) -> Result<(), Box<dyn std::error::Error>> {
    if blobs.is_empty() {
        return Ok(());
    }

    fs::create_dir_all("artifacts")?;

    let client = vercel_blob::client::VercelBlobClient::new();
    for blob in blobs.iter() {
        let contents = client
            .download(&blob.url, DownloadCommandOptions { byte_range: None })
            .await?;
        // Strip any directory components (e.g. the commit-hash prefix) so the
        // file lands directly inside `artifacts/`.
        let name = Path::new(&blob.pathname)
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("artifact.bin");
        fs::write(Path::new("artifacts").join(name), contents)?;
    }

    Ok(())
}

/// Download a single blob by URL, discarding its contents.
///
/// NOTE(review): the downloaded bytes are dropped on the floor — this only
/// verifies that the blob at `blob_url` is fetchable. Confirm callers do not
/// expect the data to be persisted anywhere.
///
/// # Errors
/// Fails if the Vercel Blob download call fails.
pub async fn download_blob(blob_url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let options = DownloadCommandOptions { byte_range: None };
    vercel_blob::client::VercelBlobClient::new()
        .download(blob_url, options)
        .await?;
    Ok(())
}

/// Copy every regular file from `source_dir` into `dest_dir` (non-recursive).
///
/// `dest_dir` is created if it does not exist. Subdirectories and other
/// non-file entries in `source_dir` are skipped. Each successful copy is
/// logged to stdout.
///
/// # Errors
/// Fails if either directory cannot be read/created or any copy fails.
pub fn copy_files(source_dir: &str, dest_dir: &str) -> Result<(), Box<dyn std::error::Error>> {
    let dest_root = Path::new(dest_dir);
    // Ensure the destination exists before copying anything into it.
    fs::create_dir_all(dest_root)?;

    for entry in fs::read_dir(Path::new(source_dir))? {
        let entry = entry?;
        // Only plain files are mirrored; directories are not recursed into.
        if !entry.file_type()?.is_file() {
            continue;
        }
        let src = entry.path();
        let dst = dest_root.join(src.file_name().unwrap());
        fs::copy(&src, &dst)?;
        println!("Copied: {:?} -> {:?}", src, dst);
    }

    Ok(())
}

/// Upload raw bytes to Vercel Blob at `blob_path`.
///
/// The path is deterministic (no random suffix), so re-uploading the same
/// path overwrites the previous blob. `content_type`, when provided, is
/// forwarded unchanged.
///
/// # Errors
/// Fails if the Vercel Blob `put` call fails.
pub async fn upload_blob(
    blob_path: &str,
    bytes: Vec<u8>,
    content_type: Option<&str>,
) -> Result<(), Box<dyn std::error::Error>> {
    let options = PutCommandOptions {
        add_random_suffix: false,
        cache_control_max_age: None,
        content_type: content_type.map(str::to_string),
    };
    vercel_blob::client::VercelBlobClient::new()
        .put(blob_path, bytes, options)
        .await?;
    Ok(())
}

/// Upload all `.wasm` files from `folder_path` to Vercel Blob under
/// `<commit_hash>/<filename.wasm>`.
/// Upload all `.wasm` files from `folder_path` to Vercel Blob under
/// `<commit_hash>/<filename.wasm>`, plus `version_map.json` if present.
///
/// Blob paths are deterministic (no random suffix), so re-running for the
/// same commit overwrites the prior uploads. Subdirectories are not
/// recursed into, and files with non-UTF-8 names are skipped.
///
/// # Errors
/// Fails if the git commit hash cannot be resolved, the directory cannot be
/// read, a file cannot be read, or any upload fails.
pub async fn upload_wasm_folder(folder_path: &str) -> Result<(), Box<dyn std::error::Error>> {
    // Determine current commit hash; blobs are namespaced per commit.
    let output = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()?;
    // Without this check a failed git invocation would silently upload
    // everything under the bogus empty prefix "/".
    if !output.status.success() {
        return Err("`git rev-parse HEAD` exited with a non-zero status".into());
    }
    let commit_hash = String::from_utf8(output.stdout)?;
    let commit_hash = commit_hash.trim();

    for entry in fs::read_dir(folder_path)? {
        let entry = entry?;
        if !entry.file_type()?.is_file() {
            continue;
        }

        let path = entry.path();
        let file_name = match path.file_name().and_then(|n| n.to_str()) {
            Some(name) => name.to_string(),
            None => continue, // non-UTF-8 filename: skip, as before
        };

        // Decide the content type in one place: wasm artifacts and the
        // version map are uploaded; everything else is ignored.
        let is_wasm = path
            .extension()
            .and_then(|e| e.to_str())
            .map(|e| e.eq_ignore_ascii_case("wasm"))
            .unwrap_or(false);
        let content_type = if is_wasm {
            "application/wasm"
        } else if file_name.eq_ignore_ascii_case("version_map.json") {
            "application/json"
        } else {
            continue;
        };

        let blob_path = format!("{}/{}", commit_hash, file_name);
        let bytes = fs::read(&path)?;
        // Deterministic path + correct content type per file kind.
        upload_blob(&blob_path, bytes, Some(content_type)).await?;
        log::info!("Uploaded {} to {}", path.display(), blob_path);
    }

    Ok(())
}
Loading