This repository has been archived by the owner on Jun 20, 2024. It is now read-only.

Commit

remove arc and refactor code
frisitano committed Jun 9, 2024
1 parent 7cd3d62 commit 2cbcf98
Showing 9 changed files with 221 additions and 245 deletions.
rpc/Cargo.toml (2 changes: 1 addition & 1 deletion)
@@ -26,7 +26,7 @@ hex = "0.4.3"
hex-literal = "0.4.1"
itertools = "0.13.0"
url = "2.5.0"
primitive-types = "0.12.2"
__compat_primitive_types = { version = "0.12.2", package = "primitive-types" }
tower = { version = "0.4" , features = ["retry"] }

# Local dependencies
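
The change above uses Cargo's dependency renaming: the `package = "primitive-types"` key keeps pulling the same crate, but it is now referenced in Rust source as `__compat_primitive_types`. A minimal sketch of what that looks like at a use site (the function below is illustrative, not code from this commit):

// Illustrative only: the renamed dependency is referenced under the
// `__compat_primitive_types` name, which Cargo maps back to `primitive-types`.
fn zero_hash() -> __compat_primitive_types::H256 {
    __compat_primitive_types::H256::zero()
}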
rpc/src/compat.rs (52 changes: 25 additions & 27 deletions)
@@ -1,23 +1,23 @@
pub trait ToPrimitive<Out> {
fn to_primitive(self) -> Out;
pub trait Compat<Out> {
fn compat(self) -> Out;
}

impl ToPrimitive<primitive_types::H160> for alloy::primitives::Address {
fn to_primitive(self) -> primitive_types::H160 {
impl Compat<__compat_primitive_types::H160> for alloy::primitives::Address {
fn compat(self) -> __compat_primitive_types::H160 {
let alloy::primitives::Address(alloy::primitives::FixedBytes(arr)) = self;
primitive_types::H160(arr)
__compat_primitive_types::H160(arr)
}
}

impl ToPrimitive<primitive_types::H256> for alloy::primitives::B256 {
fn to_primitive(self) -> primitive_types::H256 {
impl Compat<__compat_primitive_types::H256> for alloy::primitives::B256 {
fn compat(self) -> __compat_primitive_types::H256 {
let alloy::primitives::FixedBytes(arr) = self;
primitive_types::H256(arr)
__compat_primitive_types::H256(arr)
}
}

impl ToPrimitive<[primitive_types::U256; 8]> for alloy::primitives::Bloom {
fn to_primitive(self) -> [primitive_types::U256; 8] {
impl Compat<[__compat_primitive_types::U256; 8]> for alloy::primitives::Bloom {
fn compat(self) -> [__compat_primitive_types::U256; 8] {
let alloy::primitives::Bloom(alloy::primitives::FixedBytes(src)) = self;
// have u8 * 256
// want U256 * 8
@@ -26,44 +26,42 @@ impl ToPrimitive<[primitive_types::U256; 8]> for alloy::primitives::Bloom {
let dst = core::array::from_fn(|_ix| {
// This is a bit spicy because we're going from an uninterpreted array of bytes
// to wide integers, but we trust this `From` impl to do the right thing
primitive_types::U256::from(<[u8; 32]>::try_from(chunks.next().unwrap()).unwrap())
__compat_primitive_types::U256::from(
<[u8; 32]>::try_from(chunks.next().unwrap()).unwrap(),
)
});
assert_eq!(chunks.len(), 0);
dst
}
}

impl ToPrimitive<primitive_types::U256> for alloy::primitives::U256 {
fn to_primitive(self) -> primitive_types::U256 {
primitive_types::U256(self.into_limbs())
impl Compat<__compat_primitive_types::U256> for alloy::primitives::U256 {
fn compat(self) -> __compat_primitive_types::U256 {
__compat_primitive_types::U256(self.into_limbs())
}
}

impl ToPrimitive<Vec<Vec<u8>>> for Vec<alloy::primitives::Bytes> {
fn to_primitive(self) -> Vec<Vec<u8>> {
impl Compat<Vec<Vec<u8>>> for Vec<alloy::primitives::Bytes> {
fn compat(self) -> Vec<Vec<u8>> {
self.into_iter().map(|x| x.to_vec()).collect()
}
}

pub trait ToAlloy<Out> {
fn to_alloy(self) -> Out;
}

impl ToAlloy<alloy::primitives::Address> for primitive_types::H160 {
fn to_alloy(self) -> alloy::primitives::Address {
let primitive_types::H160(arr) = self;
impl Compat<alloy::primitives::Address> for __compat_primitive_types::H160 {
fn compat(self) -> alloy::primitives::Address {
let __compat_primitive_types::H160(arr) = self;
alloy::primitives::Address(alloy::primitives::FixedBytes(arr))
}
}

impl ToAlloy<alloy::primitives::StorageKey> for primitive_types::H256 {
fn to_alloy(self) -> alloy::primitives::StorageKey {
let primitive_types::H256(arr) = self;
impl Compat<alloy::primitives::StorageKey> for __compat_primitive_types::H256 {
fn compat(self) -> alloy::primitives::StorageKey {
let __compat_primitive_types::H256(arr) = self;
alloy::primitives::FixedBytes(arr)
}
}

#[test]
fn bloom() {
let _did_not_panic = alloy::primitives::Bloom::ZERO.to_primitive();
let _did_not_panic = alloy::primitives::Bloom::ZERO.compat();
}
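
Taken together, the two one-way traits (`ToPrimitive` and `ToAlloy`) collapse into the single `Compat<Out>` trait above. A minimal usage sketch, assuming it lives inside the `rpc` crate where the private `compat` module is visible (the function itself is illustrative, not part of the commit):

use crate::compat::Compat;

// Round-trips an address through the legacy `primitive-types` representation
// using the `Compat` impls shown in the diff above.
fn round_trip(addr: alloy::primitives::Address) -> alloy::primitives::Address {
    let legacy: __compat_primitive_types::H160 = addr.compat(); // alloy -> primitive-types
    legacy.compat() // primitive-types -> alloy
}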
rpc/src/jerigon.rs (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ use serde::Deserialize;
use serde_json::json;
use trace_decoder::trace_protocol::{BlockTrace, BlockTraceTriePreImages, TxnInfo};

use super::metadata::fetch_other_block_data;
use super::fetch_other_block_data;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
rpc/src/lib.rs (112 changes: 111 additions & 1 deletion)
@@ -1,5 +1,115 @@
use alloy::{
providers::Provider,
rpc::types::eth::{BlockId, BlockTransactionsKind, Withdrawal},
transports::Transport,
};
use anyhow::Context as _;
use evm_arithmetization::proof::{BlockHashes, BlockMetadata};
use futures::{StreamExt as _, TryStreamExt as _};
use trace_decoder::types::{BlockLevelData, OtherBlockData};

mod compat;
pub mod jerigon;
mod metadata;
pub mod native;
pub mod retry;

use compat::Compat;

/// Fetches other block data for the target block.
async fn fetch_other_block_data<ProviderT, TransportT>(
provider: ProviderT,
target_block_id: BlockId,
checkpoint_block_id: BlockId,
) -> anyhow::Result<OtherBlockData>
where
ProviderT: Provider<TransportT>,
TransportT: Transport + Clone,
{
let target_block = provider
.get_block(target_block_id, BlockTransactionsKind::Hashes)
.await?
.context("target block does not exist")?;
let target_block_number = target_block
.header
.number
.context("target block is missing field `number`")?;
let chain_id = provider.get_chain_id().await?;
let checkpoint_state_trie_root = provider
.get_block(checkpoint_block_id, BlockTransactionsKind::Hashes)
.await?
.context("checkpoint block does not exist")?
.header
.state_root;

let mut prev_hashes = [alloy::primitives::B256::ZERO; 256];
let concurrency = prev_hashes.len();
futures::stream::iter(
prev_hashes
.iter_mut()
.rev() // fill RTL
.zip(std::iter::successors(Some(target_block_number), |it| {
it.checked_sub(1)
}))
.map(|(dst, n)| {
let provider = &provider;
async move {
let block = provider
.get_block(n.into(), BlockTransactionsKind::Hashes)
.await
.context("couldn't get block")?
.context("no such block")?;
*dst = block.header.parent_hash;
anyhow::Ok(())
}
}),
)
.buffered(concurrency)
.try_collect::<()>()
.await
.context("couldn't fill previous hashes")?;

let other_data = OtherBlockData {
b_data: BlockLevelData {
b_meta: BlockMetadata {
block_beneficiary: target_block.header.miner.compat(),
block_timestamp: target_block.header.timestamp.into(),
block_number: target_block_number.into(),
block_difficulty: target_block.header.difficulty.into(),
block_random: target_block
.header
.mix_hash
.context("target block is missing field `mix_hash`")?
.compat(),
block_gaslimit: target_block.header.gas_limit.into(),
block_chain_id: chain_id.into(),
block_base_fee: target_block
.header
.base_fee_per_gas
.context("target block is missing field `base_fee_per_gas`")?
.into(),
block_gas_used: target_block.header.gas_used.into(),
block_bloom: target_block.header.logs_bloom.compat(),
},
b_hashes: BlockHashes {
prev_hashes: prev_hashes.map(|it| it.compat()).into(),
cur_hash: target_block
.header
.hash
.context("target block is missing field `hash`")?
.compat(),
},
withdrawals: target_block
.withdrawals
.into_iter()
.flatten()
.map(
|Withdrawal {
address, amount, ..
}| { (address.compat(), amount.into()) },
)
.collect(),
},
checkpoint_state_trie_root: checkpoint_state_trie_root.compat(),
};
Ok(other_data)
}
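
With `fetch_other_block_data` now living at the crate root, a caller inside the crate would look roughly like the sketch below (the block numbers and the surrounding function are assumptions for illustration; only the `fetch_other_block_data` signature comes from the diff above):

// Hypothetical in-crate caller. Assumes alloy's `From<u64> for BlockId`
// conversion for the example block numbers.
async fn example<ProviderT, TransportT>(provider: ProviderT) -> anyhow::Result<()>
where
    ProviderT: Provider<TransportT>,
    TransportT: Transport + Clone,
{
    let target: BlockId = 19_500_000u64.into();
    let checkpoint: BlockId = 19_499_999u64.into();
    let other_data = fetch_other_block_data(provider, target, checkpoint).await?;
    println!("checkpoint state root: {:?}", other_data.checkpoint_state_trie_root);
    Ok(())
}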
rpc/src/metadata.rs (109 changes: 0 additions & 109 deletions)

This file was deleted.

rpc/src/native/block.rs (34 changes: 0 additions & 34 deletions)

This file was deleted.

(Diffs for the remaining changed files are not shown.)
