452 changes: 317 additions & 135 deletions Cargo.lock

Large diffs are not rendered by default.

7 changes: 4 additions & 3 deletions Cargo.toml
@@ -17,9 +17,10 @@ repository = "https://github.com/scroll-tech/scroll"
version = "4.7.1"

[workspace.dependencies]
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.1" }
scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.1" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.1" }
# with openvm 1.4.2
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "2e8e29f" }
scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "2e8e29f" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "2e8e29f" }

sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91.2", features = ["scroll", "rkyv"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91.2" }
19 changes: 2 additions & 17 deletions crates/libzkp/src/lib.rs
@@ -13,22 +13,8 @@ use serde_json::value::RawValue;
use std::{collections::HashMap, path::Path, sync::OnceLock};
use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};

pub(crate) fn witness_use_legacy_mode(fork_name: &str) -> eyre::Result<bool> {
ADDITIONAL_FEATURES
.get()
.and_then(|features| features.get(fork_name))
.map(|cfg| cfg.legacy_witness_encoding)
.ok_or_else(|| {
eyre::eyre!(
"can not find features setting for unrecognized fork {}",
fork_name
)
})
}

#[derive(Debug, Default, Clone)]
struct FeatureOptions {
legacy_witness_encoding: bool,
for_openvm_13_prover: bool,
}

@@ -41,11 +27,10 @@ impl FeatureOptions {
for feat_s in feats.split(':') {
match feat_s.trim().to_lowercase().as_str() {
"legacy_witness" => {
tracing::info!("set witness encoding for legacy mode");
ret.legacy_witness_encoding = true;
tracing::warn!("legacy witness is no longer supported");
}
"openvm_13" => {
tracing::info!("set prover should use openvm 13");
tracing::warn!("set prover should use openvm 13");
ret.for_openvm_13_prover = true;
}
s => tracing::warn!("unrecognized dynamic feature: {s}"),
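For reference, below is a minimal, self-contained sketch of the colon-separated feature-string parsing shown in the hunk above (strings like "legacy_witness:openvm_13"). Only FeatureOptions, for_openvm_13_prover, and the feature names come from the diff; the free function, the use of eprintln! in place of tracing, and the main driver are assumptions for illustration.

// Sketch only: mirrors the updated FeatureOptions parsing, with eprintln!
// standing in for the tracing macros so the example runs on plain std.

#[derive(Debug, Default, Clone)]
struct FeatureOptions {
    for_openvm_13_prover: bool,
}

fn parse_features(feats: &str) -> FeatureOptions {
    let mut ret = FeatureOptions::default();
    for feat_s in feats.split(':') {
        match feat_s.trim().to_lowercase().as_str() {
            // legacy witness encoding has been dropped, so the flag is only acknowledged
            "legacy_witness" => eprintln!("warn: legacy witness is no longer supported"),
            "openvm_13" => {
                eprintln!("warn: set prover should use openvm 13");
                ret.for_openvm_13_prover = true;
            }
            s => eprintln!("warn: unrecognized dynamic feature: {s}"),
        }
    }
    ret
}

fn main() {
    let opts = parse_features("legacy_witness:openvm_13");
    assert!(opts.for_openvm_13_prover);
    println!("{opts:?}");
}
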
11 changes: 2 additions & 9 deletions crates/libzkp/src/tasks/batch.rs
@@ -4,13 +4,11 @@ use sbv_primitives::{B256, U256};
use scroll_zkvm_types::{
batch::{
build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderValidium,
BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, LegacyBatchWitness,
ReferenceHeader, N_BLOB_BYTES,
BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, ReferenceHeader, N_BLOB_BYTES,
},
chunk::ChunkInfo,
public_inputs::{ForkName, MultiVersionPublicInputs, Version},
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
version::{Codec, Domain, STFVersion},
};

@@ -118,12 +116,7 @@ pub struct BatchProvingTask {
impl BatchProvingTask {
pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, BatchInfo, B256)> {
let (witness, metadata, batch_pi_hash) = self.precheck()?;
let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
let legacy_witness = LegacyBatchWitness::from(witness);
to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
} else {
super::encode_task_to_witness(&witness)?
};
let serialized_witness = super::encode_task_to_witness(&witness)?;

let proving_task = ProvingTask {
identifier: self.batch_header.batch_hash().to_string(),
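The same simplification recurs in bundle.rs and chunk.rs below: the per-fork branch that rkyv-encoded a Legacy*Witness is removed, and every witness now goes through super::encode_task_to_witness. A tiny sketch of the shape of that refactor, using placeholder types (Witness and the stand-in encoder are assumptions, not the real libzkp API):

// Illustrative only: the conditional legacy-encoding branch collapses to a
// single unconditional call.

struct Witness(Vec<u8>);

// stand-in for super::encode_task_to_witness (assumed signature)
fn encode_task_to_witness(w: &Witness) -> Result<Vec<u8>, String> {
    Ok(w.0.clone())
}

fn serialize_witness(witness: &Witness) -> Result<Vec<u8>, String> {
    // previously: if witness_use_legacy_mode(fork)? { rkyv-encode LegacyBatchWitness } else { ... }
    // now: one path for every fork
    encode_task_to_witness(witness)
}

fn main() {
    let bytes = serialize_witness(&Witness(vec![1, 2, 3])).unwrap();
    assert_eq!(bytes, vec![1, 2, 3]);
}
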
10 changes: 2 additions & 8 deletions crates/libzkp/src/tasks/bundle.rs
@@ -1,10 +1,9 @@
use eyre::Result;
use sbv_primitives::B256;
use scroll_zkvm_types::{
bundle::{BundleInfo, BundleWitness, LegacyBundleWitness},
bundle::{BundleInfo, BundleWitness},
public_inputs::{MultiVersionPublicInputs, Version},
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};

use crate::proofs::BatchProof;
@@ -27,12 +26,7 @@ pub struct BundleProvingTask {
impl BundleProvingTask {
pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, BundleInfo, B256)> {
let (witness, bundle_info, bundle_pi_hash) = self.precheck()?;
let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
let legacy = LegacyBundleWitness::from(witness);
to_rkyv_bytes::<RancorError>(&legacy)?.into_vec()
} else {
super::encode_task_to_witness(&witness)?
};
let serialized_witness = super::encode_task_to_witness(&witness)?;

let proving_task = ProvingTask {
identifier: self.identifier(),
10 changes: 2 additions & 8 deletions crates/libzkp/src/tasks/chunk.rs
@@ -2,10 +2,9 @@ use eyre::Result;
use sbv_core::BlockWitness;
use sbv_primitives::{types::consensus::BlockHeader, B256};
use scroll_zkvm_types::{
chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness, ValidiumInputs},
chunk::{execute, ChunkInfo, ChunkWitness, ValidiumInputs},
public_inputs::{MultiVersionPublicInputs, Version},
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};

use super::chunk_interpreter::*;
@@ -117,12 +116,7 @@ impl ChunkProvingTask {

pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, ChunkInfo, B256)> {
let (witness, chunk_info, chunk_pi_hash) = self.precheck()?;
let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
let legacy_witness = LegacyChunkWitness::from(witness);
to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
} else {
super::encode_task_to_witness(&witness)?
};
let serialized_witness = super::encode_task_to_witness(&witness)?;

let proving_task = ProvingTask {
identifier: self.identifier(),
2 changes: 1 addition & 1 deletion crates/prover-bin/Cargo.toml
@@ -9,7 +9,7 @@ edition.workspace = true
scroll-zkvm-types.workspace = true
scroll-zkvm-prover.workspace = true
libzkp = { path = "../libzkp"}
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "05648db" }
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "22ad34e" }
serde.workspace = true
serde_json.workspace = true
once_cell.workspace =true
39 changes: 28 additions & 11 deletions crates/prover-bin/src/prover.rs
@@ -30,6 +30,9 @@ pub struct AssetsLocationData {
#[serde(default)]
/// an altered url for the specified vk
pub asset_detours: HashMap<String, url::Url>,
/// when the asset file already exists, do not verify it against the network; helpful for debugging
#[serde(default)]
pub debug_mode: bool,
}

impl AssetsLocationData {
@@ -79,6 +82,13 @@ impl AssetsLocationData {
// Get file metadata to check size
if let Ok(metadata) = std::fs::metadata(&local_file_path) {
// Make a HEAD request to get remote file size
if self.debug_mode {
println!(
"File {} already exists, skipping download under debugmode",
filename
);
continue;
}

if let Ok(head_resp) = client.head(download_url.clone()).send().await {
if let Some(content_length) = head_resp.headers().get("content-length") {
@@ -201,12 +211,20 @@ impl ProvingService for LocalProver {
error: Some(format!("proving task failed: {}", e)),
..Default::default()
},
Err(e) => QueryTaskResponse {
task_id: req.task_id,
status: TaskStatus::Failed,
error: Some(format!("proving task panicked: {}", e)),
..Default::default()
},
Err(e) => {
if e.is_panic() {
// simply re-throw the panic for any panic in the proving process,
// causing the worker loop and the whole prover to exit
std::panic::resume_unwind(e.into_panic());
}

QueryTaskResponse {
task_id: req.task_id,
status: TaskStatus::Failed,
error: Some(format!("proving task failed: {}", e)),
..Default::default()
}
}
};
} else {
return QueryTaskResponse {
Expand Down Expand Up @@ -273,7 +291,9 @@ impl LocalProver {
let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;

let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
let is_openvm_13 = prover_task.use_openvm_13;
if prover_task.use_openvm_13 {
eyre::bail!("prover do not support snark params base on openvm 13");
}
let prover_task: ProvingTask = prover_task.into();
let vk = hex::encode(&prover_task.vk);
let handler = if let Some(handler) = self.handlers.get(&vk) {
@@ -300,10 +320,7 @@
.location_data
.get_asset(&vk, &url_base, &base_config.workspace_path)
.await?;
let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
&asset_path,
is_openvm_13,
)?));
let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(&asset_path)?));
self.handlers.insert(vk, circuits_handler.clone());
circuits_handler
};
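The new Err arm above distinguishes a panicked proving task from an ordinary failure and re-raises the panic so the whole prover exits instead of reporting a failed task. A self-contained sketch of that pattern, assuming the proving job runs on a tokio task whose JoinHandle yields Result<T, JoinError> (tokio with the rt and macros features); everything besides the tokio/std items is a placeholder:

use tokio::task::JoinError;

fn classify(join_result: Result<Result<String, String>, JoinError>) -> Result<String, String> {
    match join_result {
        Ok(Ok(proof)) => Ok(proof),
        // the proving code returned an error: report a failed task
        Ok(Err(e)) => Err(format!("proving task failed: {e}")),
        Err(e) => {
            if e.is_panic() {
                // re-raise the panic so the worker loop (and the prover) exits
                std::panic::resume_unwind(e.into_panic());
            }
            // the task was cancelled rather than panicking
            Err(format!("proving task was cancelled: {e}"))
        }
    }
}

#[tokio::main]
async fn main() {
    let handle = tokio::spawn(async { Ok::<_, String>("proof-bytes".to_string()) });
    let out = classify(handle.await);
    assert_eq!(out.unwrap(), "proof-bytes");
}
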
5 changes: 2 additions & 3 deletions crates/prover-bin/src/zk_circuits_handler/universal.rs
@@ -16,15 +16,14 @@ pub struct UniversalHandler {
unsafe impl Send for UniversalHandler {}

impl UniversalHandler {
pub fn new(workspace_path: impl AsRef<Path>, is_openvm_v13: bool) -> Result<Self> {
pub fn new(workspace_path: impl AsRef<Path>) -> Result<Self> {
let path_app_exe = workspace_path.as_ref().join("app.vmexe");
let path_app_config = workspace_path.as_ref().join("openvm.toml");
let segment_len = Some((1 << 21) - 100);
let segment_len = Some((1 << 22) - 100);
let config = ProverConfig {
path_app_config,
path_app_exe,
segment_len,
is_openvm_v13,
};

let prover = Prover::setup(config, None)?;
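The segment_len change above moves the limit from just under 2^21 to just under 2^22 cycles. A quick arithmetic check (the 100-cycle margin is taken from the diff; its exact purpose is an assumption here, presumably headroom below the power-of-two boundary):

// Quick check of the segment_len values before and after the change.
fn main() {
    let old_len: u64 = (1 << 21) - 100;
    let new_len: u64 = (1 << 22) - 100;
    assert_eq!(old_len, 2_097_052);
    assert_eq!(new_len, 4_194_204);
    println!("segment_len doubled (minus margin): {old_len} -> {new_len}");
}
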
3 changes: 3 additions & 0 deletions tests/prover-e2e/mainnet-galileo/.make.env
@@ -0,0 +1,3 @@
BEGIN_BLOCK?=26653680
END_BLOCK?=26653686
SCROLL_FORK_NAME=galileo
(next file: name not rendered in the diff)
@@ -5,6 +5,10 @@
"maxOpenNum": 5,
"maxIdleNum": 1
},
"fetch_config": {
"endpoint": "https://mainnet-rpc.scroll.io",
"l2_message_queue_address": "0x5300000000000000000000000000000000000000"
},
"validium_mode": false,
"codec_version": 8
"codec_version": 9
}
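The hunk above adds a fetch_config block and bumps codec_version from 8 to 9 for the galileo fork. A hypothetical serde sketch of reading such a block; only the JSON field names come from the config above, while the struct names (FetchConfig, RelayerConfig) are illustrative assumptions, and it requires serde (with derive) and serde_json:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct FetchConfig {
    endpoint: String,
    l2_message_queue_address: String,
}

#[derive(Debug, Deserialize)]
struct RelayerConfig {
    fetch_config: FetchConfig,
    validium_mode: bool,
    codec_version: u8,
}

fn main() {
    let raw = r#"{
        "fetch_config": {
            "endpoint": "https://mainnet-rpc.scroll.io",
            "l2_message_queue_address": "0x5300000000000000000000000000000000000000"
        },
        "validium_mode": false,
        "codec_version": 9
    }"#;
    let cfg: RelayerConfig = serde_json::from_str(raw).expect("valid config");
    assert_eq!(cfg.codec_version, 9);
    println!("{cfg:?}");
}
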
tests/prover-e2e/mainnet-galileo/config.template.json
@@ -10,9 +10,8 @@
"min_prover_version": "v4.4.33",
"verifiers": [
{
"features": "legacy_witness:openvm_13",
"assets_path": "assets_feynman",
"fork_name": "feynman"
"assets_path": "assets_galileo",
"fork_name": "galileo"
}
]
}
@@ -25,7 +24,7 @@
},
"l2": {
"validium_mode": false,
"chain_id": 534351,
"chain_id": 534352,
"l2geth": {
"endpoint": "<serach a public rpc endpoint like alchemy>"

⚠️ Potential issue | 🟡 Minor

Typo in placeholder comment.

Minor typo: "serach" should be "search".

     "l2geth": {
-      "endpoint": "<serach a public rpc endpoint like alchemy>"
+      "endpoint": "<search a public rpc endpoint like alchemy>"
     }
🤖 Prompt for AI Agents
In tests/prover-e2e/mainnet-galileo/config.template.json around line 29, the
placeholder comment contains a typo ("serach"); update the placeholder to read
"search a public rpc endpoint like alchemy" so the comment is spelled correctly
and clearer to users.

}