Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

29 changes: 26 additions & 3 deletions client/consensus/qpow/src/worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,28 +139,51 @@ where
///
/// Returns `true` if the seal is valid for the current block, `false` otherwise.
/// Returns `false` if there's no current build.
/// Logs detailed information on failure for debugging.
pub fn verify_seal(&self, seal: &Seal) -> bool {
let build = self.build.lock();
let build = match build.as_ref() {
Some(b) => b,
None => return false,
None => {
warn!(target: LOG_TARGET, "verify_seal: No current build available");
return false;
},
};

// Convert seal to nonce [u8; 64]
let nonce: [u8; 64] = match seal.as_slice().try_into() {
Ok(arr) => arr,
Err(_) => {
warn!(target: LOG_TARGET, "Seal does not have exactly 64 bytes");
warn!(target: LOG_TARGET, "Seal does not have exactly 64 bytes, got {}", seal.len());
return false;
},
};

let pre_hash = build.metadata.pre_hash.0;
let best_hash = build.metadata.best_hash;
let difficulty = build.metadata.difficulty;
let extrinsic_count = build.proposal.block.extrinsics().len();

// Verify using runtime API
match self.client.runtime_api().verify_nonce_local_mining(best_hash, pre_hash, nonce) {
Ok(valid) => valid,
Ok(true) => true,
Ok(false) => {
log::error!(
target: LOG_TARGET,
"verify_seal FAILED:\n\
pre_hash (block template): {}\n\
best_hash (parent block): {}\n\
difficulty: {}\n\
nonce (seal): {}\n\
extrinsics in block: {}",
hex::encode(pre_hash),
best_hash,
difficulty,
hex::encode(nonce),
extrinsic_count,
);
false
},
Err(e) => {
warn!(target: LOG_TARGET, "Runtime API error verifying seal: {:?}", e);
false
Expand Down
28 changes: 21 additions & 7 deletions node/src/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,25 +184,39 @@ async fn handle_external_mining(
server.broadcast_job(job).await;

// Wait for results from miners, retrying on invalid seals
// Track both best_hash (parent) and pre_hash (block template) to detect rebuilds
let best_hash = metadata.best_hash;
let original_pre_hash = metadata.pre_hash;
loop {
let (miner_id, seal) = match wait_for_mining_result(server, &job_id, || {
// Interrupt if cancelled, parent block changed, OR block template was rebuilt
cancellation_token.is_cancelled() ||
worker_handle.metadata().map(|m| m.best_hash != best_hash).unwrap_or(true)
worker_handle
.metadata()
.map(|m| m.best_hash != best_hash || m.pre_hash != original_pre_hash)
.unwrap_or(true)
})
.await
{
Some(result) => result,
None => return ExternalMiningOutcome::Interrupted,
None => {
// Check why we were interrupted - log if it was a rebuild
if let Some(current) = worker_handle.metadata() {
if current.best_hash == best_hash && current.pre_hash != original_pre_hash {
log::info!(
"⛏️ Block template rebuilt while mining job {}. Old pre_hash: {}, New pre_hash: {}. Rebroadcasting...",
job_id,
hex::encode(original_pre_hash.as_bytes()),
hex::encode(current.pre_hash.as_bytes())
);
}
}
return ExternalMiningOutcome::Interrupted;
},
};

// Verify the seal before attempting to submit (submit consumes the build)
if !worker_handle.verify_seal(&seal) {
log::error!(
"🚨🚨🚨 INVALID SEAL FROM MINER {}! Job {} - seal failed verification. This may indicate a miner bug or stale work. Continuing to wait for valid seals...",
miner_id,
job_id
);
continue;
}

Expand Down
23 changes: 20 additions & 3 deletions pallets/mining-rewards/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ pub mod pallet {
};
use frame_system::pallet_prelude::*;
use pallet_treasury::TreasuryProvider;
use qp_poseidon::PoseidonHasher;
use qp_poseidon::{PoseidonHasher, ToFelts};
use qp_wormhole::TransferProofRecorder;
use sp_consensus_pow::POW_ENGINE_ID;
use sp_runtime::{generic::DigestItem, traits::Saturating, Permill};
Expand Down Expand Up @@ -194,8 +194,25 @@ pub mod pallet {
Err(_) => continue,
};

// Hash the preimage with Poseidon2 to derive the wormhole address
let wormhole_address_bytes = PoseidonHasher::hash_padded(&preimage);
// The preimage is the "first_hash" from wormhole derivation:
// first_hash = hash(salt + secret)
// To get the address, we need: address = hash(first_hash)
//
// We must use the same serialization as the ZK circuit:
// - Convert 32 bytes to 4 field elements using
// unsafe_digest_bytes_to_felts (8 bytes per element)
// - Hash without padding using hash_variable_length
let preimage_felts = preimage.to_felts();
let wormhole_address_bytes =
PoseidonHasher::hash_variable_length(preimage_felts);

// Log the preimage and derived address for debugging
log::debug!(
target: "mining-rewards",
"🔑 Wormhole derivation: preimage={:?} -> address={:?}",
preimage,
wormhole_address_bytes
);

// Convert to AccountId
if let Ok(miner) =
Expand Down
6 changes: 4 additions & 2 deletions pallets/mining-rewards/src/mock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use frame_support::{
parameter_types,
traits::{ConstU32, Everything, Hooks},
};
use qp_poseidon::PoseidonHasher;
use qp_poseidon::{PoseidonHasher, ToFelts};
use sp_consensus_pow::POW_ENGINE_ID;
use sp_runtime::{
app_crypto::sp_core,
Expand Down Expand Up @@ -130,8 +130,10 @@ pub fn miner_preimage(id: u8) -> [u8; 32] {
}

/// Helper function to derive wormhole address from preimage.
///
/// Must mirror the derivation in the pallet's `lib.rs`: the 32-byte
/// preimage is decomposed into field elements via `ToFelts` and hashed
/// with `PoseidonHasher::hash_variable_length` (no padding), matching
/// the ZK circuit's serialization of 8 bytes per field element.
pub fn wormhole_address_from_preimage(preimage: [u8; 32]) -> sp_core::crypto::AccountId32 {
    // Convert bytes -> field elements, then hash without padding.
    // (The old `hash_padded(&preimage)` derivation was removed; it did
    // not match the circuit and its result was dead/shadowed here.)
    let preimage_felts = preimage.to_felts();
    let hash = PoseidonHasher::hash_variable_length(preimage_felts);
    sp_core::crypto::AccountId32::from(hash)
}

Expand Down
1 change: 1 addition & 0 deletions pallets/mining-rewards/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,7 @@ fn test_fees_and_rewards_to_miner() {
}

#[test]
#[ignore] // This test takes a very long time (~120M blocks simulation), run manually with --ignored
fn test_emission_simulation_120m_blocks() {
new_test_ext().execute_with(|| {
// Add realistic initial supply similar to genesis
Expand Down
1 change: 1 addition & 0 deletions pallets/multisig/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ pallet-scheduler = { workspace = true, default-features = true }
pallet-timestamp.workspace = true
pallet-utility = { workspace = true, default-features = true }
qp-scheduler = { workspace = true, default-features = true }
qp-wormhole = { workspace = true, default-features = true }
sp-core.workspace = true
sp-io.workspace = true
sp-runtime = { workspace = true, default-features = true }
Expand Down
20 changes: 18 additions & 2 deletions pallets/multisig/src/mock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -173,15 +173,30 @@ impl<T> Time for MockTimestamp<T> {

parameter_types! {
    pub const ReversibleTransfersPalletIdValue: PalletId = PalletId(*b"rtpallet");
    // Default reversibility delay for the mock runtime: 10 blocks.
    // (A duplicate two-line definition of this constant — a diff-merge
    // artifact — was removed; duplicate consts do not compile.)
    pub const DefaultDelay: BlockNumberOrTimestamp<u64, u64> = BlockNumberOrTimestamp::BlockNumber(10);
    pub const MinDelayPeriodBlocks: u64 = 2;
    pub const MinDelayPeriodMoment: u64 = 2000;
    pub const MaxReversibleTransfers: u32 = 100;
    pub const MaxInterceptorAccounts: u32 = 10;
    pub const HighSecurityVolumeFee: Permill = Permill::from_percent(1);
}

/// Mock proof recorder that does nothing (for tests)
///
/// Implements `qp_wormhole::TransferProofRecorder` as a no-op so the
/// reversible-transfers pallet can be wired into this test runtime
/// without real wormhole proof storage. Tests in this crate do not
/// inspect recorded proofs, so nothing is stored.
pub struct MockProofRecorder;

impl qp_wormhole::TransferProofRecorder<AccountId, u32, Balance> for MockProofRecorder {
// Recording can never fail in the mock, so the error type is trivial.
type Error = ();

// Accepts any transfer proof, discards it, and always returns Ok(()).
fn record_transfer_proof(
_asset_id: Option<u32>,
_from: AccountId,
_to: AccountId,
_amount: Balance,
) -> Result<(), Self::Error> {
Ok(())
}
}

impl pallet_reversible_transfers::Config for Test {
type SchedulerOrigin = OriginCaller;
type RuntimeHoldReason = RuntimeHoldReason;
Expand All @@ -198,6 +213,7 @@ impl pallet_reversible_transfers::Config for Test {
type TimeProvider = MockTimestamp<Test>;
type MaxInterceptorAccounts = MaxInterceptorAccounts;
type VolumeFee = HighSecurityVolumeFee;
type ProofRecorder = MockProofRecorder;
}

parameter_types! {
Expand Down
2 changes: 2 additions & 0 deletions pallets/reversible-transfers/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ pallet-balances.workspace = true
pallet-recovery.workspace = true
qp-high-security = { path = "../../primitives/high-security", default-features = false }
qp-scheduler.workspace = true
qp-wormhole.workspace = true
scale-info = { features = ["derive"], workspace = true }
sp-arithmetic.workspace = true
sp-runtime.workspace = true
Expand Down Expand Up @@ -56,6 +57,7 @@ std = [
"pallet-utility/std",
"qp-high-security/std",
"qp-scheduler/std",
"qp-wormhole/std",
"scale-info/std",
"sp-core/std",
"sp-io/std",
Expand Down
29 changes: 27 additions & 2 deletions pallets/reversible-transfers/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ use frame_support::{
};
use frame_system::pallet_prelude::*;
use qp_scheduler::{BlockNumberOrTimestamp, DispatchTime, ScheduleNamed};
use qp_wormhole::TransferProofRecorder;
use sp_arithmetic::Permill;
use sp_runtime::traits::StaticLookup;

Expand Down Expand Up @@ -202,6 +203,10 @@ pub mod pallet {
/// fees. The fee is burned (removed from total issuance).
#[pallet::constant]
type VolumeFee: Get<Permill>;

/// Proof recorder for storing wormhole transfer proofs.
/// This records transfer proofs when reversible transfers are executed.
type ProofRecorder: TransferProofRecorder<Self::AccountId, AssetIdOf<Self>, BalanceOf<Self>>;
}

/// Maps accounts to their chosen reversibility delay period (in milliseconds).
Expand Down Expand Up @@ -600,6 +605,16 @@ pub mod pallet {
let (call, _) = T::Preimages::realize::<RuntimeCallOf<T>>(&pending.call)
.map_err(|_| Error::<T>::CallDecodingFailed)?;

// Determine asset_id for transfer proof recording (None for native balance transfers)
let asset_id: Option<AssetIdOf<T>> =
if let Ok(pallet_assets::Call::transfer_keep_alive { id, .. }) =
call.clone().try_into()
{
Some(id.into())
} else {
None
};

// If this is an assets transfer, release the held amount before dispatch
if let Ok(pallet_assets::Call::transfer_keep_alive { id, .. }) = call.clone().try_into()
{
Expand All @@ -626,8 +641,18 @@ pub mod pallet {
// Remove transfer from all storage (handles indexes, account count, etc.)
Self::transfer_removed(&pending.from, *tx_id, &pending);

let post_info = call
.dispatch(frame_support::dispatch::RawOrigin::Signed(pending.from.clone()).into());
let post_info =
call.dispatch(frame_system::RawOrigin::Signed(pending.from.clone()).into());

// Record transfer proof if dispatch was successful
if post_info.is_ok() {
let _ = T::ProofRecorder::record_transfer_proof(
asset_id,
pending.from.clone(),
pending.to.clone(),
pending.amount,
);
}

// Emit event
Self::deposit_event(Event::TransactionExecuted { tx_id: *tx_id, result: post_info });
Expand Down
51 changes: 51 additions & 0 deletions pallets/reversible-transfers/src/tests/mock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,56 @@ parameter_types! {
pub const HighSecurityVolumeFee: Permill = Permill::from_percent(1);
}

/// A single transfer proof captured by [`MockProofRecorder`] during a test.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RecordedTransferProof {
    pub asset_id: Option<u32>,
    pub from: AccountId,
    pub to: AccountId,
    pub amount: Balance,
}

thread_local! {
    /// Thread-local log of every proof recorded in the current test thread.
    static RECORDED_PROOFS: RefCell<Vec<RecordedTransferProof>> = const { RefCell::new(Vec::new()) };
}

/// Mock proof recorder that tracks recorded proofs for test verification.
pub struct MockProofRecorder;

impl MockProofRecorder {
    /// Return a snapshot of every proof recorded so far.
    pub fn get_recorded_proofs() -> Vec<RecordedTransferProof> {
        RECORDED_PROOFS.with(|log| log.borrow().clone())
    }

    /// Drop all recorded proofs; call at the start of each test.
    pub fn clear() {
        RECORDED_PROOFS.with(|log| log.borrow_mut().clear());
    }

    /// Return the most recently recorded proof, if any.
    pub fn last_proof() -> Option<RecordedTransferProof> {
        RECORDED_PROOFS.with(|log| log.borrow().last().cloned())
    }
}

impl qp_wormhole::TransferProofRecorder<AccountId, u32, Balance> for MockProofRecorder {
    type Error = ();

    // Append the proof to the thread-local log so tests can assert on it.
    fn record_transfer_proof(
        asset_id: Option<u32>,
        from: AccountId,
        to: AccountId,
        amount: Balance,
    ) -> Result<(), Self::Error> {
        let entry = RecordedTransferProof { asset_id, from, to, amount };
        RECORDED_PROOFS.with(|log| log.borrow_mut().push(entry));
        Ok(())
    }
}

impl pallet_reversible_transfers::Config for Test {
type SchedulerOrigin = OriginCaller;
type RuntimeHoldReason = RuntimeHoldReason;
Expand All @@ -214,6 +264,7 @@ impl pallet_reversible_transfers::Config for Test {
type TimeProvider = MockTimestamp<Test>;
type MaxInterceptorAccounts = MaxInterceptorAccounts;
type VolumeFee = HighSecurityVolumeFee;
type ProofRecorder = MockProofRecorder;
}

parameter_types! {
Expand Down
Loading