Commit

review comments
temaniarpit27 committed Nov 13, 2024
1 parent 00ae12e commit 3a30886
Showing 7 changed files with 108 additions and 65 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/benchmark.yml
@@ -1,4 +1,5 @@
---- # Proof generation benchmarking workflow
+---
+# Proof generation benchmarking workflow

name: Benchmark proving

54 changes: 27 additions & 27 deletions .github/workflows/cron_jerigon_zero_testing.yml
@@ -1,10 +1,14 @@
---
# Cron running every sunday at 12PM UTC which will run the integration and benchmarking tests for Jerigon Zero

name: Jerigon Zero Testing

on:
-  # Uncomment when ready to run on a schedule
+  # TODO - Change this before merge
+  # # Uncomment when ready to run on a schedule
  # schedule:
-  #   # Run every Sunday at 12:00 AM (UTC)
-  #   - cron: "0 0 * * 0"
+  #   # Run every Sunday at 12:00 PM (UTC)
+  #   - cron: "0 12 * * SUN"
push:
branches: [develop]
pull_request:
@@ -21,22 +25,18 @@ env:
jobs:
jerigon_zero_testing:
name: Jerigon Zero Testing - Integration and Benchmarking
-    runs-on: zero-ci
+    runs-on: zero-reg
concurrency:
group: jerigon_zero_testing
cancel-in-progress: true
steps:
-      - name: Checkout zk_evm code
-        uses: actions/checkout@v4
-
-      - name: Setup Rust Toolchain
-        uses: actions-rust-lang/setup-rust-toolchain@v1
-
-      - name: Set up Rust Cache
-        uses: Swatinem/rust-cache@v2
-        with:
-          cache-on-failure: true
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/rust

# Build optimized for the native CPU with `lld` linker and allow detailed performance profiling.
- name: Build the project
run: |
-          RUSTFLAGS='-C target-cpu=native -Zlinker-features=-lld -Copt-level=3' cargo build --release
+          RUSTFLAGS='-C target-cpu=native -Zlinker-features=-lld' cargo build --release
sudo sysctl kernel.perf_event_paranoid=0
- name: Set up QEMU
@@ -52,9 +52,10 @@
- name: Run Erigon Network
run: |
cd ..
-          tar xf "$(pwd)/zk_evm/test_data/erigon-data.tar.gz" || {
-            echo "Failed to extract erigon-data.tar.gz"; exit 1;
-          }
+          curl -o erigon-test-data.tar.gz \
+            https://224d54070bdacdb423e0b7594ebdc231.ipfscdn.io/ipfs/bafybeigpm34mzhebd24uljuntmhulo6dpniqf75xtxcviztemb2fi5q74i \
+            || { echo "Failed to download erigon.tar.gz"; exit 1; }
+          tar xf ./erigon-test-data.tar.gz
docker pull ghcr.io/0xpolygonzero/erigon:feat-zero
docker run -d --name erigon \
-p 8545:8545 \
@@ -73,30 +74,29 @@
run: |
export ETH_RPC_URL="http://localhost:8545"
rm -rf proofs/* circuits/* ./proofs.json test.out verify.out leader.out
-          random_numbers=($(shuf -i 1-500 -n 5))
-          for number in "${random_numbers[@]}"; do
-            hex_number="0x$(echo "obase=16; $number" | bc)"
-            OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./scripts/prove_rpc.sh $hex_number $hex_number $ETH_RPC_URL jerigon true 3000 100
-          done
+          # TODO - Change block number and checkpoint before merge use 1000 to 1100
+          OUTPUT_TO_TERMINAL=true ./scripts/prove_rpc.sh 1 1 $ETH_RPC_URL jerigon 0 3000 100 test_only
- name: Download Previous Results
uses: dawidd6/action-download-artifact@v6
with:
workflow: cron_jerigon_zero_testing.yml
workflow_conclusion: success
-          name: jerigon_zero_benchmark
+          name: jerigon_zero_testing
path: ./
if_no_artifact_found: ignore

- name: Run the Benchmark Script
run: |
export ETH_RPC_URL="http://localhost:8545"
-          ./scripts/jerigon_zero_benchmark.sh
+          ./scripts/jerigon_zero_benchmark.bash
- name: Upload New Results
uses: actions/upload-artifact@v4
with:
-          name: jerigon_zero_benchmark
-          path: ./jerigon_zero_output.log
+          name: jerigon_zero_testing
+          path: |
+            ./jerigon_zero_benchmark.log
+            ./jerigon_zero_error.log
retention-days: 90
overwrite: true
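For local debugging outside CI, the proving step above can be approximated with a small script. A minimal sketch, assuming an Erigon node is already serving JSON-RPC on localhost:8545 and that prove_rpc.sh's positional arguments follow the order seen in this workflow (start block, end block, RPC URL, network, checkpoint, then tuning parameters — semantics inferred from the two invocations in this diff, not documented here):

```bash
#!/usr/bin/env bash
# Sketch: reproduce the workflow's test_only proving step locally.
set -euo pipefail

export ETH_RPC_URL="http://localhost:8545"

# Wait until the node answers eth_blockNumber before proving.
until curl -s -X POST -H 'Content-Type: application/json' \
    --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
    "$ETH_RPC_URL" | grep -q '"result"'; do
  sleep 2
done

# Same invocation as the workflow step above.
OUTPUT_TO_TERMINAL=true ./scripts/prove_rpc.sh 1 1 "$ETH_RPC_URL" jerigon 0 3000 100 test_only
```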
13 changes: 7 additions & 6 deletions .github/workflows/yamllint.yml
@@ -1,14 +1,15 @@
---- # Run yamllint on all YAML files in the repository
+---
+# Run yamllint on all YAML files in the repository

name: Yamllint
-'on':
+"on":
  pull_request:
    paths:
-      - '**/*.yml'
-      - '.github/**'
+      - "**/*.yml"
+      - ".github/**"
  workflow_dispatch:
    branches:
-      - '**'
+      - "**"

jobs:
yamllint-check:
@@ -22,5 +23,5 @@ jobs:
run: >
yamllint
--format github
-d "{extends: default, rules: {line-length: {max: 120}, truthy: {check-keys: false}}}"
-d "{extends: default, rules: {line-length: {max: 130}, truthy: {check-keys: false}}}"
.github scripts
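The same lint can be run locally before pushing; a sketch assuming yamllint is installed (e.g. via `pip install yamllint`), mirroring the CI invocation above including the relaxed 130-column limit:

```bash
yamllint \
  --format github \
  -d "{extends: default, rules: {line-length: {max: 130}, truthy: {check-keys: false}}}" \
  .github scripts
```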
1 change: 1 addition & 0 deletions resp.json
@@ -0,0 +1 @@
{"jsonrpc":"2.0","id":1,"result":{"trie_pre_images":{"combined":{"compact":"0x"}},"code_db":{},"txn_info":[{"traces":{"0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266":{"balance":"0xd3c21bce8022181a02cd","nonce":"0x3"},"0x0000000000000000000000000000000000000000":{"balance":"0x3bbc3b274218","nonce":"0x0"},"0x852da15b70a3e197d1d668a9a481b1f4c2168a5d":{"balance":"0x3","nonce":"0x0"}},"meta":{"byte_code":"0xf86602843b9aca07830186a094852da15b70a3e197d1d668a9a481b1f4c2168a5d0180820a95a0d5081ac0967a518e50ccdcb92d9eed98b71d5dfa9b38ccf3c63adc429d9d13a6a058c166e24381b70ceff2cbe22786a686e62a004ac82aecf0516c91dd53ef2e25","new_receipt_trie_node_byte":"0xf9010801825208b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0","gas_used":21000}}]}}
scripts/jerigon_zero_benchmark.sh → scripts/jerigon_zero_benchmark.bash
@@ -1,9 +1,5 @@
-#!/bin/bash
-# ------------------------------------------------------------------------------
-set -exo pipefail
-
-# Args:
-# 1 --> Output file (Not used in the current script)
+#!/usr/bin/env bash
+set -euxo pipefail

# Get the number of processors for parallelism
if [[ "$OSTYPE" == "darwin"* ]]; then
@@ -18,8 +14,9 @@ PROOF_OUTPUT_DIR="${REPO_ROOT}/proofs"
BLOCK_BATCH_SIZE="${BLOCK_BATCH_SIZE:-8}"

# Logging setup
OUTPUT_LOG="jerigon_zero_output.log"
OUTPUT_LOG="jerigon_zero_benchmark.log"
BLOCK_OUTPUT_LOG="jerigon_zero_block_output.log"
ERROR_LOG="jerigon_zero_error.log"
PROOFS_FILE_LIST="${PROOF_OUTPUT_DIR}/proof_files.json"

# Ensure necessary directories exist
@@ -33,57 +30,58 @@ export RUST_BACKTRACE=full
export RUST_LOG=info

# Log the current date and time
echo "$(date +"%Y-%m-%d %H:%M:%S")" &>> "$OUTPUT_LOG"

# Define the blocks to process
blocks=(100 200 300 400 500)
date +"%Y-%m-%d %H:%M:%S" &>> "$OUTPUT_LOG"
date +"%Y-%m-%d %H:%M:%S" &>> "$ERROR_LOG"

# Function to process each block
process_block() {
-    local block=$1
-
-    echo "Processing block: $block" &>> "$OUTPUT_LOG"
+    local block start_time end_time duration_sec PERF_TIME PERF_USER_TIME PERF_SYS_TIME
+    block=$1

# Fetch block data
-    if ! ./target/release/rpc --rpc-url "$ETH_RPC_URL" fetch --start-block "$block" --end-block "$block" > "output_${block}.json"; then
-        echo "Failed to fetch block data for block: $block" &>> "$OUTPUT_LOG"
-        exit 1
+    if ! ./target/release/rpc --rpc-url "$ETH_RPC_URL" fetch --start-block "$block" --end-block "$block" > "witness_${block}.json"; then
+        echo "Failed to fetch block data for block: $block" &>> "$ERROR_LOG"
+        return
fi

-    local start_time=$(date +%s%N)
+    start_time=$(date +%s%N)

# Run performance stats
-    if ! perf stat -e cycles ./target/release/leader --runtime in-memory --load-strategy monolithic --block-batch-size "$BLOCK_BATCH_SIZE" --proof-output-dir "$PROOF_OUTPUT_DIR" stdio < "output_${block}.json" &> "$BLOCK_OUTPUT_LOG"; then
+    if ! perf stat -e cycles ./target/release/leader --runtime in-memory --use-test-config --load-strategy on-demand --block-batch-size "$BLOCK_BATCH_SIZE" --proof-output-dir "$PROOF_OUTPUT_DIR" stdio < "witness_${block}.json" &> "$BLOCK_OUTPUT_LOG"; then
        echo "Performance command failed for block: $block" &>> "$OUTPUT_LOG"
-        cat "$BLOCK_OUTPUT_LOG" &>> "$OUTPUT_LOG"
-        exit 1
+        cat "$BLOCK_OUTPUT_LOG" &>> "$ERROR_LOG"
+        return
fi

-    local end_time=$(date +%s%N)
+    end_time=$(date +%s%N)

set +o pipefail
-    if ! cat "$BLOCK_OUTPUT_LOG" | grep "Successfully wrote to disk proof file " | awk '{print $NF}' | tee "$PROOFS_FILE_LIST"; then
+    if ! grep "Successfully wrote to disk proof file " "$BLOCK_OUTPUT_LOG" | awk '{print $NF}' | tee "$PROOFS_FILE_LIST"; then
        echo "Proof list not generated for block: $block. Check the log for details." &>> "$OUTPUT_LOG"
-        cat "$BLOCK_OUTPUT_LOG" &>> "$OUTPUT_LOG"
-        exit 1
+        cat "$BLOCK_OUTPUT_LOG" &>> "$ERROR_LOG"
+        return
fi

-    local duration_sec=$(echo "scale=3; ($end_time - $start_time) / 1000000000" | bc -l)
+    duration_sec=$(echo "scale=3; ($end_time - $start_time) / 1000000000" | bc -l)

# Extract performance timings
-    local PERF_TIME=$(grep "seconds time elapsed" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
-    local PERF_USER_TIME=$(grep "seconds user" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
-    local PERF_SYS_TIME=$(grep "seconds sys" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+    PERF_TIME=$(grep "seconds time elapsed" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+    PERF_USER_TIME=$(grep "seconds user" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+    PERF_SYS_TIME=$(grep "seconds sys" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')

echo "Success for block: $block!"
echo "Proving duration for block $block: $duration_sec seconds, performance time: $PERF_TIME, performance user time: $PERF_USER_TIME, performance system time: $PERF_SYS_TIME" &>> "$OUTPUT_LOG"
}

# Process each block
-for block in "${blocks[@]}"; do
-    process_block "$block"
+# TODO - Change block numbers before merge use 700 to 1000
+for i in $(seq 1 2); do
+    process_block "$i"
done

# Finalize logging
echo "Processing completed at: $(date +"%Y-%m-%d %H:%M:%S")" &>> "$OUTPUT_LOG"
echo "" &>> "$OUTPUT_LOG"

echo "Processing completed at: $(date +"%Y-%m-%d %H:%M:%S")" &>> "$ERROR_LOG"
echo "" &>> "$ERROR_LOG"
Binary file removed test_data/erigon-data.tar.gz
42 changes: 42 additions & 0 deletions zero/src/rpc/zeth.rs
@@ -0,0 +1,42 @@
use alloy::{
primitives::B256, providers::Provider, rpc::types::eth::BlockId, transports::Transport,
};
use trace_decoder::BlockTrace;

use super::{fetch_other_block_data, CachedProvider};
use crate::prover::BlockProverInput;

pub async fn block_prover_input<ProviderT, TransportT>(
cached_provider: std::sync::Arc<CachedProvider<ProviderT, TransportT>>,
target_block_id: BlockId,
checkpoint_block_number: u64,
) -> anyhow::Result<BlockProverInput>
where
ProviderT: Provider<TransportT>,
TransportT: Transport + Clone,
{
let block_number = match target_block_id {
BlockId::Number(block_number) => block_number,
_ => return Err(anyhow::anyhow!("block number expected")),
};

println!("Fetching block trace for block number: {}", block_number);

let block_trace: BlockTrace = cached_provider
.get_provider()
.await?
.raw_request("zero_getBlockTraceByNumber".into(), vec![block_number])
.await?;

println!("trace: {:?}", block_trace);

let other_data =
fetch_other_block_data(cached_provider, target_block_id, checkpoint_block_number).await?;
println!("other_data: {:?}", other_data);

// Assemble
Ok(BlockProverInput {
block_trace,
other_data,
})
}
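The fetch path that block_prover_input serves can be exercised end-to-end with the repo's rpc and leader binaries, the way the benchmark script above drives them. A sketch with flags copied from that script; whether a given invocation routes through this zeth module depends on runtime configuration, which is an assumption here:

```bash
# Sketch: generate a witness for block 1 via the rpc binary, then feed it
# to the leader in test mode, mirroring the benchmark script above.
export ETH_RPC_URL="http://localhost:8545"
./target/release/rpc --rpc-url "$ETH_RPC_URL" fetch --start-block 1 --end-block 1 > witness_1.json
./target/release/leader --runtime in-memory --use-test-config --load-strategy on-demand \
    --block-batch-size 8 --proof-output-dir ./proofs stdio < witness_1.json
```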
