Implement JumpDest fetching from RPC.
Automated testing missing.
einar-polygon committed Aug 18, 2024
1 parent 0e55293 commit 270256f
Showing 24 changed files with 525 additions and 37 deletions.
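
For orientation: the jumpdest-table types this commit threads through the prover are imported from `crate::generation::prover_input` (that file's diff is among the ones not rendered here). The sketch below is a rough reconstruction of their shape, inferred from the field accesses in the `interpreter.rs` hunks further down, not the exact definitions; in particular, keying the witness and the code database by code hash (`H256`) is an assumption.

use std::collections::{BTreeSet, HashMap};
use ethereum_types::H256;

/// Reached `JUMPDEST` offsets per context: `ctx -> offsets`.
pub struct ContextJumpDests(pub HashMap<usize, BTreeSet<usize>>);

/// The raw witness fetched from RPC: code hash -> per-context reached JUMPDESTs.
pub struct JumpDestTableWitness(pub HashMap<H256, ContextJumpDests>);

/// Prover-ready form: context -> list of proof and `JUMPDEST` offsets.
pub struct JumpDestTableProcessed(pub HashMap<usize, Vec<usize>>);

/// Contract bytecode referenced by the trace, keyed by code hash.
pub type CodeDb = HashMap<H256, Vec<u8>>;

// The real definitions presumably also derive Clone/Default/Serialize/Deserialize,
// since GenerationInputs embeds the witness and the tests default it.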
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -63,7 +63,7 @@ eth_trie = "0.4.0"
ethereum-types = "0.14.1"
futures = "0.3.30"
hashbrown = "0.14.5"
hex = "0.4.3"
hex = { version = "0.4.3", features = ["serde"] }
hex-literal = "0.4.1"
impl-codec = "0.6.0"
impl-num-traits = "0.1.2"
4 changes: 2 additions & 2 deletions evm_arithmetization/Cargo.toml
@@ -15,11 +15,12 @@ homepage.workspace = true
keywords.workspace = true

[dependencies]
__compat_primitive_types = { workspace = true }
anyhow = { workspace = true }
bytes = { workspace = true }
env_logger = { workspace = true }
ethereum-types = { workspace = true }
hex = { workspace = true, optional = true }
hex = { workspace = true }
hex-literal = { workspace = true }
itertools = { workspace = true }
keccak-hash = { workspace = true }
@@ -55,7 +56,6 @@ ripemd = { workspace = true }

[features]
default = ["parallel"]
asmtools = ["hex"]
parallel = [
"plonky2/parallel",
"plonky2_maybe_rayon/parallel",
1 change: 1 addition & 0 deletions evm_arithmetization/benches/fibonacci_25m_gas.rs
@@ -194,6 +194,7 @@ fn prepare_setup() -> anyhow::Result<GenerationInputs> {
prev_hashes: vec![H256::default(); 256],
cur_hash: H256::default(),
},
jumpdest_table: Default::default(),
})
}

84 changes: 76 additions & 8 deletions evm_arithmetization/src/cpu/kernel/interpreter.rs
@@ -6,9 +6,10 @@
//! jumpdest table, before the actual CPU carries on with contract execution.
use std::collections::{BTreeSet, HashMap};
use std::str::FromStr;

use anyhow::anyhow;
use ethereum_types::{BigEndianHash, U256};
use ethereum_types::{BigEndianHash, H256, U256};
use log::Level;
use mpt_trie::partial_trie::PartialTrie;
use plonky2::field::types::Field;
@@ -19,6 +20,10 @@ use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::generation::debug_inputs;
use crate::generation::mpt::load_all_mpts;
use crate::generation::prover_input::{
get_proofs_and_jumpdests, CodeDb, ContextJumpDests, JumpDestTableProcessed,
JumpDestTableWitness,
};
use crate::generation::rlp::all_rlp_prover_inputs_reversed;
use crate::generation::state::{
all_withdrawals_prover_inputs_reversed, GenerationState, GenerationStateCheckpoint,
@@ -52,6 +57,7 @@ pub(crate) struct Interpreter<F: Field> {
/// Counts the number of appearances of each opcode. For debugging purposes.
#[allow(unused)]
pub(crate) opcode_count: [usize; 0x100],
/// A table of contexts and their reached JUMPDESTs.
jumpdest_table: HashMap<usize, BTreeSet<usize>>,
/// `true` if we are currently carrying out a jumpdest analysis.
pub(crate) is_jumpdest_analysis: bool,
@@ -60,14 +66,15 @@
pub(crate) clock: usize,
}

/// Simulates the CPU execution from `state` until the program counter reaches
/// `final_label` in the current context.
pub(crate) fn simulate_cpu_and_get_user_jumps<F: Field>(
final_label: &str,
state: &GenerationState<F>,
) -> Option<HashMap<usize, Vec<usize>>> {
) -> (
Option<HashMap<usize, Vec<usize>>>,
HashMap<usize, BTreeSet<usize>>,
) {
match state.jumpdest_table {
Some(_) => None,
Some(_) => (None, Default::default()),
None => {
let halt_pc = KERNEL.global_labels[final_label];
let initial_context = state.registers.context;
@@ -82,14 +89,60 @@ pub(crate) fn simulate_cpu_and_get_user_jumps<F: Field>(

interpreter
.generation_state
.set_jumpdest_analysis_inputs(interpreter.jumpdest_table);
.set_jumpdest_analysis_inputs(interpreter.jumpdest_table.clone());

log::debug!("Simulated CPU for jumpdest analysis halted.");
interpreter.generation_state.jumpdest_table
(
interpreter.generation_state.jumpdest_table,
interpreter.jumpdest_table,
)
}
}
}

/// Computes the JUMPDEST proofs for each context.
///
/// # Arguments
///
/// - `jumpdest_table_rpc`: The raw table received from RPC.
/// - `code_db`: The corresponding database of contract code used in the trace.
pub(crate) fn set_jumpdest_analysis_inputs_rpc(
jumpdest_table_rpc: &JumpDestTableWitness,
code_db: &CodeDb,
) -> JumpDestTableProcessed {
let ctx_proofs = jumpdest_table_rpc
.0
.iter()
.flat_map(|(code_addr, ctx_jumpdests)| {
prove_context_jumpdests(&code_db[code_addr], ctx_jumpdests)
})
.collect();
JumpDestTableProcessed(ctx_proofs)
}

/// Orchestrates the proving of all contexts in a specific bytecode.
///
/// # Arguments
///
/// - `ctx_jumpdests`: Map from `ctx` to its list of offsets to reached
/// `JUMPDEST`s.
/// - `code`: The bytecode for the contexts. This is the same for all contexts.
fn prove_context_jumpdests(
code: &[u8],
ctx_jumpdests: &ContextJumpDests,
) -> HashMap<usize, Vec<usize>> {
ctx_jumpdests
.0
.iter()
.map(|(&ctx, jumpdests)| {
let proofs = jumpdests.last().map_or(Vec::default(), |&largest_address| {
get_proofs_and_jumpdests(code, largest_address, jumpdests.clone())
});
(ctx, proofs)
})
.collect()
}

impl<F: Field> Interpreter<F> {
/// Returns an instance of `Interpreter` given `GenerationInputs`, and
/// assuming we are initializing with the `KERNEL` code.
@@ -508,14 +561,29 @@ impl<F: Field> State<F> for Interpreter<F> {

let op = decode(registers, opcode)?;

// log here
fill_op_flag(op, &mut row);

self.fill_stack_fields(&mut row)?;

if registers.is_kernel {
log_kernel_instruction(self, op);
} else {
self.log_debug(format!("User instruction: {:?}", op));
//self.log_debug(format!("User instruction: {:?} CTX {:?}", op,
let hash = "0xc02ea02af1da253b9cf3d1de648c3355211f490cf4b8641b3146e69450870ba6";
let debug_tx = H256::from_str(hash).unwrap();
let curr_tx =
keccak_hash::keccak(self.generation_state.inputs.signed_txn.as_ref().unwrap());

if curr_tx == debug_tx {
log::info!(
"JMP: {:<5} TX: {:?} CTX: {:<2} OP: {:<10?}",
self.is_jumpdest_analysis,
curr_tx,
registers.code_context(),
op
);
}
}

let generation_state = self.get_mut_generation_state();
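Taken together, the two helpers added above compute the prover's per-context jumpdest table directly from the RPC-provided witness, alongside the existing simulation in `simulate_cpu_and_get_user_jumps`: `set_jumpdest_analysis_inputs_rpc` walks every code blob mentioned in the witness, and `prove_context_jumpdests` turns the largest reached `JUMPDEST` offset of each context into proof offsets via `get_proofs_and_jumpdests`. A minimal usage sketch, assuming the type shapes reconstructed earlier (the bytecode, hash, and offsets are made up, and the call only compiles inside the crate since the helper is `pub(crate)`):

use std::collections::{BTreeSet, HashMap};

// Illustrative values only: a tiny contract that jumps to the JUMPDEST at offset 3.
let code: Vec<u8> = vec![0x60, 0x03, 0x56, 0x5b, 0x00]; // PUSH1 0x03, JUMP, JUMPDEST, STOP
let code_hash = keccak_hash::keccak(&code);

let mut code_db: CodeDb = HashMap::new();
code_db.insert(code_hash, code);

// Context 1 reached the JUMPDEST at offset 3, according to the RPC trace.
let ctx = ContextJumpDests(HashMap::from([(1usize, BTreeSet::from([3usize]))]));
let witness = JumpDestTableWitness(HashMap::from([(code_hash, ctx)]));

// Convert the raw witness into the prover-ready, per-context proof table.
let processed: JumpDestTableProcessed = set_jumpdest_analysis_inputs_rpc(&witness, &code_db);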
2 changes: 2 additions & 0 deletions evm_arithmetization/src/cpu/kernel/tests/add11.rs
@@ -195,6 +195,7 @@ fn test_add11_yml() {
prev_hashes: vec![H256::default(); 256],
cur_hash: H256::default(),
},
jumpdest_table: Default::default(),
};

let initial_stack = vec![];
@@ -376,6 +377,7 @@ fn test_add11_yml_with_exception() {
prev_hashes: vec![H256::default(); 256],
cur_hash: H256::default(),
},
jumpdest_table: Default::default(),
};

let initial_stack = vec![];
4 changes: 4 additions & 0 deletions evm_arithmetization/src/generation/mod.rs
@@ -10,6 +10,7 @@ use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use prover_input::JumpDestTableWitness;
use serde::{Deserialize, Serialize};
use starky::config::StarkConfig;
use GlobalMetadata::{
@@ -78,6 +79,9 @@ pub struct GenerationInputs {
/// The hash of the current block, and a list of the 256 previous block
/// hashes.
pub block_hashes: BlockHashes,

/// A jumptable describing each JUMPDEST reached.
pub jumpdest_table: JumpDestTableWitness,
}

#[derive(Clone, Debug, Deserialize, Serialize, Default)]
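The new `jumpdest_table` field on `GenerationInputs` is the hand-off point: the bench and `add11` tests above simply default it, while a caller with RPC data attaches the fetched witness. A minimal sketch with placeholder names (`base_inputs` and `witness_from_rpc` are not from this commit):

// `base_inputs` is a GenerationInputs value prepared exactly as before this change;
// `witness_from_rpc` is the JumpDestTableWitness obtained from the RPC fetch.
let mut inputs: GenerationInputs = base_inputs;
inputs.jumpdest_table = witness_from_rpc; // tests and benches use Default::default() here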
(The remaining changed files are not rendered here.)
