4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -44,7 +44,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly
+          toolchain: nightly-2025-02-16
           override: true
       - run: rustup component add rustfmt
       - uses: actions-rs/cargo@v1
@@ -60,7 +60,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly
+          toolchain: nightly-2025-02-16
           override: true
       - name: Install Dependencies
         run: sudo apt install protobuf-compiler
6 changes: 3 additions & 3 deletions Makefile
@@ -11,9 +11,9 @@ help: ## Display this help screen
 	awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
 
 clippy: ## Run clippy checks over all workspace members and formatted correctly
-	@cargo check
-	@cargo fmt --all -- --check
-	@cargo clippy --all-targets -- -D warnings --no-deps
+	#@cargo fmt --all -- --check
+	#@cargo clippy --all-targets -- -D warnings --no-deps
+	@cargo clippy -- -D warnings --no-deps
 
 fix: ## Automatically apply lint suggestions. This flag implies `--no-deps` and `--all-targets`
 	@cargo clippy --fix
4 changes: 2 additions & 2 deletions prover-core/src/stage.rs
@@ -18,7 +18,7 @@ impl Stage {
                 format!("{task_id}/batch_proof")
             }
             Self::Aggregate(task_id, _, _) => format!("{task_id}/agg_proof"),
-            Self::Final(task_id, _) => format!("{task_id}/snark_proof"),
+            Self::Final(task_id, _) => format!("{task_id}_final/snark_proof"),
         }
     }
 
@@ -49,7 +49,7 @@ mod tests {
     #[test]
     fn test_final_stage_path() {
         let stage = Stage::Final("task_id".to_string(), "prover_addr".to_string());
-        assert_eq!(stage.path(), "task_id/snark_proof");
+        assert_eq!(stage.path(), "task_id_final/snark_proof");
     }
 
     #[test]
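For orientation, here is a minimal, self-contained sketch of the directory scheme after this change. The variant payloads are simplified from what the diff shows (Final carries a task_id and a prover address; the other payloads are assumptions), so treat it as an illustration rather than the actual prover-core definition:

```rust
// Sketch only: simplified Stage enum illustrating the new path layout.
#[allow(dead_code)]
enum Stage {
    Batch(String, String),             // (task_id, l2_batch_data) -- assumed payload
    Aggregate(String, String, String), // payload arity taken from the match arm above
    Final(String, String),             // (task_id, prover_addr)
}

impl Stage {
    fn path(&self) -> String {
        match self {
            Self::Batch(task_id, _) => format!("{task_id}/batch_proof"),
            Self::Aggregate(task_id, _, _) => format!("{task_id}/agg_proof"),
            // Final proofs now live under "<task_id>_final/..." instead of "<task_id>/...",
            // matching the "{}_final" checkpoint key used by the service layer below.
            Self::Final(task_id, _) => format!("{task_id}_final/snark_proof"),
        }
    }
}

fn main() {
    let stage = Stage::Final("task_id".to_string(), "prover_addr".to_string());
    assert_eq!(stage.path(), "task_id_final/snark_proof");
}
```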
27 changes: 1 addition & 26 deletions prover/Cargo.toml
@@ -36,7 +36,7 @@ prover-core = { path = "../prover-core" }
 
 metrics = { path = "../metrics" }
 
-powdr = { git = "https://github.com/0xEigenLabs/powdr", branch = "eigen/v1", default-features = false }
+#powdr = { git = "https://github.com/0xEigenLabs/powdr", branch = "eigen/v1", default-features = false }
 
 tokio = { version = "1.21.0", features = ["macros", "rt-multi-thread", "signal", "sync"] }
 uuid = { version = "1.2", features = ["v4", "fast-rng", "macro-diagnostics"] }
@@ -55,28 +55,3 @@ env_logger = "0.10"
 #default = ["algebraic/default", "groth16/default"]
 #avx512 = ["starky/avx512", "recursion/avx512", "zkvm/avx512", "powdr/starky-avx512"]
 #sp1_prover = ["sp1-sdk", "sp1-build"]
-
-#[patch.crates-io]
-## SHA2
-#sha2-v0-9-9 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", tag = "patch-sha2-0.9.9-sp1-4.0.0" }
-#sha2-v0-10-6 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", tag = "patch-sha2-0.10.6-sp1-4.0.0" }
-#sha2-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha2", tag = "patch-sha2-0.10.8-sp1-4.0.0" }
-## SHA3
-#sha3-v0-10-8 = { git = "https://github.com/sp1-patches/RustCrypto-hashes", package = "sha3", tag = "patch-sha3-0.10.8-sp1-4.0.0" }
-## BigInt
-#crypto-bigint = { git = "https://github.com/sp1-patches/RustCrypto-bigint", tag = "patch-0.5.5-sp1-4.0.0" }
-## Keccak
-#tiny-keccak = { git = "https://github.com/sp1-patches/tiny-keccak", tag = "patch-2.0.2-sp1-4.0.0" }
-## Ed25519
-#curve25519-dalek = { git = "https://github.com/sp1-patches/curve25519-dalek", tag = "patch-4.1.3-sp1-4.0.0" }
-#curve25519-dalek-ng = { git = "https://github.com/sp1-patches/curve25519-dalek-ng", tag = "patch-4.1.1-sp1-4.0.0" }
-## ECDSA
-#k256 = { git = "https://github.com/sp1-patches/elliptic-curves", tag = "patch-k256-13.4-sp1-4.1.0" }
-#p256 = { git = "https://github.com/sp1-patches/elliptic-curves", tag = "patch-p256-13.2-sp1-4.1.0" }
-#secp256k1 = { git = "https://github.com/sp1-patches/rust-secp256k1", tag = "patch-0.29.1-sp1-4.0.0" }
-## BN254
-#substrate-bn = { git = "https://github.com/sp1-patches/bn", tag = "patch-0.6.0-sp1-4.0.0" }
-## BLS12-381
-#bls12_381 = { git = "https://github.com/sp1-patches/bls12_381", tag = "patch-0.8.0-sp1-4.0.0-v2" }
-## RSA
-#rsa = { git = "https://github.com/sp1-patches/RustCrypto-RSA/", tag = "patch-0.9.6-sp1-4.0.0" }
12 changes: 7 additions & 5 deletions prover/src/pipeline.rs
@@ -127,7 +127,7 @@ impl Pipeline {
     fn save_checkpoint(&self, key: &String, finished: bool) -> Result<String> {
         let binding = self.task_map.lock().unwrap();
         let task = binding.get(key);
-
+        log::debug!("task: {:?}", task);
         if let Some(status) = task {
             // mkdir
             let workdir = Path::new(&self.basedir).join(status.path());
@@ -172,15 +172,17 @@ impl Pipeline {
 
         if let Some(stage) = task {
             // mkdir
-            let workdir = Path::new(&self.basedir).join(stage.path());
+            let workdir: std::path::PathBuf = Path::new(&self.basedir).join(stage.path());
+            log::info!("load_final_proof_and_input, workdir: {:?}", workdir);
 
-            let proof_path = workdir.clone().join("proof.json");
+            let proof_path = workdir.clone().join("../proof_bls12381.json");
+
             let proof = std::fs::read_to_string(proof_path.clone()).map_err(|e| {
                 anyhow!("Failed to load the proof.json: {:?}, err: {}", proof_path, e)
             })?;
 
-            let input_path = workdir.join("public_input.json");
+            let input_path = workdir.clone().join("../public_inputs_bls12381.json");
 
             let input = std::fs::read_to_string(input_path.clone()).map_err(|e| {
                 anyhow!("Failed to load the public_input.json: {:?}, err: {}", input_path, e)
             })?;
@@ -192,7 +194,7 @@
     }
 
     pub fn batch_prove(&mut self, task_id: String, l2_batch_data: String) -> Result<String> {
-        let key = task_id.clone(); //self.get_key(&task_id, &chunk_id);
+        let key = task_id.clone();
         match self.task_map.get_mut() {
             Ok(w) => {
                 self.queue.push_back(key.clone());
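A note on how the new relative paths resolve: with the Stage::Final change above, the final-stage workdir becomes `<basedir>/<task_id>_final/snark_proof`, so joining `../proof_bls12381.json` makes the pipeline read the proof from `<basedir>/<task_id>_final/`. A small sketch under that assumption (the basedir value is illustrative, not the real configuration):

```rust
// Sketch only: shows where the "../" joins land; the basedir is made up.
use std::path::Path;

fn main() {
    let basedir = "/data/proofs"; // illustrative; the real value comes from Pipeline::basedir
    let workdir = Path::new(basedir).join("task_id_final/snark_proof");

    // Path::join does not normalize "..": the OS resolves it when the file is read,
    // so both files are effectively loaded from "<basedir>/task_id_final/".
    let proof_path = workdir.join("../proof_bls12381.json");
    let input_path = workdir.join("../public_inputs_bls12381.json");

    println!("{}", proof_path.display()); // /data/proofs/task_id_final/snark_proof/../proof_bls12381.json
    println!("{}", input_path.display()); // /data/proofs/task_id_final/snark_proof/../public_inputs_bls12381.json
}
```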
37 changes: 21 additions & 16 deletions service/src/prover_service.rs
@@ -397,17 +397,15 @@ impl ProverHandler for ProverRequestHandler {
         // key: format!("{}_{}", task_id, chunk_id)
         // pending task
         let mut pending_tasks = Vec::<String>::new();
-        for chunk_id in 0..cnt_chunk {
-            pending_tasks.push(format!("{}_{}", execute_task_id, chunk_id))
-        }
+        pending_tasks.push(execute_task_id.clone());
 
         let mut finished_tasks = vec![];
         let mut results = vec![String::new(); cnt_chunk];
 
         // put the task into the pipeline, skip the finished tasks
         for (index, key) in pending_tasks.iter().enumerate() {
             // let proof_result = PIPELINE.lock().unwrap().get_proof(key.clone(), 0);
-            let tmp_stage = Stage::Batch(execute_task_id.clone(), l2_batch_data.clone());
+            let tmp_stage = Stage::Batch(key.clone(), l2_batch_data.clone());
             let task_result_dir =
                 Path::new(&*BASE_DIR).join(tmp_stage.path()).join("status.finished");
             log::info!("check the task status: {}", task_result_dir.display());
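In other words, the handler now queues a single pending task keyed by execute_task_id instead of one entry per chunk, and skips it when a status.finished marker already exists under the batch stage directory. A minimal sketch of that check, with the base directory passed in explicitly since the BASE_DIR static is not shown in full here:

```rust
// Sketch only: the skip-if-finished check for a batch task.
use std::path::Path;

fn batch_already_finished(base_dir: &str, task_id: &str) -> bool {
    // Stage::Batch(task_id, ..).path() yields "<task_id>/batch_proof".
    let stage_path = format!("{task_id}/batch_proof");
    Path::new(base_dir)
        .join(stage_path)
        .join("status.finished")
        .exists()
}
```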
@@ -531,7 +529,8 @@ impl ProverHandler for ProverRequestHandler {
             let chunk_proof = ChunkProof {
                 chunk_id: chunk_id as u64,
                 proof_key: chunk_proof_key.clone(),
-                proof: format!("{}_chunk_{}", execute_task_id, chunk_id),
+                // proof: format!("{}_chunk_{}", execute_task_id, chunk_id),
+                proof: execute_task_id.clone(),
             };
 
             batch_proof_result.chunk_proofs.push(chunk_proof);
@@ -556,7 +555,6 @@
         msg_id: String,
         request: GenAggregatedProofRequest,
     ) -> Result<ProverResponse> {
-        // put the task into the pipeline
         let task_id = match PIPELINE
             .lock()
             .unwrap()
@@ -576,15 +574,16 @@
 
         log::info!("polling the agg proof of agg_task: {:?}, request id {:?}", task_id, msg_id);
 
-        let checkpoint_key = format!("{}_agg", task_id.clone());
+        log::debug!("task_id: {:?}", task_id.clone());
         // let result_key: String;
         loop {
             tokio::select! {
                 _ = polling_ticker.tick() => {
-                    let proof_result = PIPELINE.lock().unwrap().get_proof(checkpoint_key.clone(), 0);
+                    let proof_result = PIPELINE.lock().unwrap().get_proof(task_id.clone(), 0);
                     match proof_result {
                         Ok(_) => {
                             // result_key = task_key;
+                            log::info!("finished the agg stage!");
                             break;
                         }
                         Err(_) => {
@@ -601,6 +600,12 @@
             }
         }
 
+        log::debug!(
+            "Finished the task of generate agg proof, task id: {:?}, request id {:?}",
+            task_id,
+            msg_id
+        );
+
         Ok(ProverResponse {
             id: msg_id,
             response_type: Some(ResponseType::GenAggregatedProof(GenAggregatedProofResponse {
@@ -636,15 +641,9 @@ impl ProverHandler for ProverRequestHandler {
 
         log::info!("polling the final proof of agg_task: {:?}, request id {:?}", task_id, msg_id);
 
-        // let checkpoint_key = format!("{}_final", task_id.clone());
-
-        let prover_type: ProverType =
-            std::env::var("PROVER_TYPE").unwrap_or("eigen".to_string()).into();
+        let checkpoint_key: String = format!("{}_final", task_id.clone());
 
-        let checkpoint_key: String = match prover_type {
-            ProverType::Eigen => format!("{}_final", task_id.clone()),
-            ProverType::SP1 => format!("{}_agg", task_id.clone()),
-        };
+        log::debug!("checkpoint_key: {:?}", checkpoint_key.clone());
         loop {
             tokio::select! {
                 _ = polling_ticker.tick() => {
@@ -667,6 +666,12 @@
             }
         }
 
+        log::debug!(
+            "Finished the task of generate final proof, task id: {:?}, request id {:?}",
+            task_id,
+            msg_id
+        );
+
         let (proof, public_input) =
             PIPELINE.lock().unwrap().load_final_proof_and_input(&checkpoint_key)?;
 
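Both polling loops above follow the same shape: tick on an interval, ask the pipeline for the proof under the checkpoint key (the raw task_id for the aggregation stage, "{task_id}_final" for the final stage), and break once get_proof succeeds. A stripped-down sketch of that pattern; the get_proof closure and the timeout arm are stand-ins for illustration, not the service's actual API:

```rust
// Sketch only: the polling shape used by the agg/final proof handlers.
use std::time::Duration;

async fn wait_for_proof<F>(key: String, mut get_proof: F) -> anyhow::Result<()>
where
    F: FnMut(&str) -> anyhow::Result<String>,
{
    let mut polling_ticker = tokio::time::interval(Duration::from_secs(1));
    // Assumed overall deadline; the real handler may bound or cancel the loop differently.
    let deadline = tokio::time::Instant::now() + Duration::from_secs(600);

    loop {
        tokio::select! {
            _ = polling_ticker.tick() => {
                if get_proof(&key).is_ok() {
                    log::info!("finished the stage for key {key}");
                    break;
                }
                // Proof not ready yet; keep polling on the next tick.
            }
            _ = tokio::time::sleep_until(deadline) => {
                anyhow::bail!("timed out waiting for proof of {key}");
            }
        }
    }
    Ok(())
}
```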